vendor/assets/javascripts/vega-lite.js in vega-0.1.3 vs vendor/assets/javascripts/vega-lite.js in vega-0.2.0

- old (Vega-Lite 4.17.0 bundle vendored in vega-0.1.3)
+ new (Vega-Lite 5.1.1 bundle vendored in vega-0.2.0)

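The most consequential change visible in this hunk is the UMD wrapper: the removed Vega-Lite 4.17.0 bundle inlined its own copies of the vega-util helpers (accessor, splitAccessPath, logger, mergeConfig, ...), while the added 5.1.1 bundle declares 'vega-util' and 'vega' as externals, reads both from global.vega in the browser, and lists vega "^5.20.2" under peerDependencies. The sketch below illustrates the resulting load-order requirement; it assumes the vendored file matches the upstream vega-lite 5.x browser build, and the bar-chart spec and '#vis' container are illustrative, not taken from this diff.

// Minimal sketch of the load-order change implied by the new UMD header
// (assumption: the vendored vega-lite.js matches the upstream 5.x browser
// build; the chart spec and '#vis' container below are illustrative).
//
// 4.17.0: vega-lite.js could be loaded standalone, because the vega-util
//         helpers were bundled into the file itself.
// 5.1.1:  the factory is invoked as factory(exports, global.vega, global.vega),
//         so window.vega must exist before vega-lite.js is evaluated:
//
//   <script src="vega.min.js"></script>        must now come first
//   <script src="vega-lite.min.js"></script>
//
// With both globals loaded, compilation and rendering work as before:
const spec = {
  data: { values: [{ a: 'A', b: 28 }, { a: 'B', b: 55 }] },
  mark: 'bar',
  encoding: {
    x: { field: 'a', type: 'nominal' },
    y: { field: 'b', type: 'quantitative' }
  }
};

// vegaLite.compile() returns an object whose `spec` property is the
// compiled Vega specification; hand that to the Vega runtime to render.
const vegaSpec = vegaLite.compile(spec).spec;
new vega.View(vega.parse(vegaSpec), { renderer: 'canvas' })
  .initialize('#vis') // assumed container element
  .run();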
@@ -1,291 +1,187 @@ (function (global, factory) { - typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : - typeof define === 'function' && define.amd ? define(['exports'], factory) : - (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.vegaLite = {})); -}(this, (function (exports) { 'use strict'; + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('vega-util'), require('vega')) : + typeof define === 'function' && define.amd ? define(['exports', 'vega-util', 'vega'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.vegaLite = {}, global.vega, global.vega)); +}(this, (function (exports, vegaUtil, vega) { 'use strict'; - var version = "4.17.0"; - - function accessor(fn, fields, name) { - fn.fields = fields || []; - fn.fname = name; - return fn; - } - - function getter(path) { - return path.length === 1 ? get1(path[0]) : getN(path); - } - - const get1 = field => function (obj) { - return obj[field]; + var name = "vega-lite"; + var author = "Dominik Moritz, Kanit \"Ham\" Wongsuphasawat, Arvind Satyanarayan, Jeffrey Heer"; + var version$1 = "5.1.1"; + var collaborators = [ + "Kanit Wongsuphasawat (http://kanitw.yellowpigz.com)", + "Dominik Moritz (https://www.domoritz.de)", + "Arvind Satyanarayan (https://arvindsatya.com)", + "Jeffrey Heer (https://jheer.org)" + ]; + var homepage = "https://vega.github.io/vega-lite/"; + var description$1 = "Vega-Lite is a concise high-level language for interactive visualization."; + var main$1 = "build/vega-lite.js"; + var unpkg = "build/vega-lite.min.js"; + var jsdelivr = "build/vega-lite.min.js"; + var module = "build/src/index"; + var types = "build/src/index.d.ts"; + var bin = { + vl2png: "./bin/vl2png", + vl2svg: "./bin/vl2svg", + vl2pdf: "./bin/vl2pdf", + vl2vg: "./bin/vl2vg" }; - - const getN = path => { - const len = path.length; - return function (obj) { - for (let i = 0; i < len; ++i) { - obj = obj[path[i]]; - } - - return obj; - }; + var directories = { + test: "test" }; + var files = [ + "bin", + "build", + "src", + "vega-lite*", + "tsconfig.json" + ]; + var scripts = { + changelog: "conventional-changelog -p angular -r 2", + prebuild: "yarn clean:build", + build: "yarn build:only", + "build:only": "tsc -p tsconfig.build.json && rollup -c", + "prebuild:examples": "yarn build:only", + "build:examples": "yarn data && TZ=America/Los_Angeles scripts/build-examples.sh", + "prebuild:examples-full": "yarn build:only", + "build:examples-full": "TZ=America/Los_Angeles scripts/build-examples.sh 1", + "build:example": "TZ=America/Los_Angeles scripts/build-example.sh", + "build:toc": "yarn build:jekyll && scripts/generate-toc", + "build:site": "rollup -c site/rollup.config.js", + "build:jekyll": "pushd site && bundle exec jekyll build -q && popd", + "build:versions": "scripts/update-version.sh", + clean: "yarn clean:build && del-cli 'site/data/*' 'examples/compiled/*.png' && find site/examples ! -name 'index.md' ! 
-name 'data' -type f -delete", + "clean:build": "del-cli 'build/*' !build/vega-lite-schema.json", + "predeploy:site": "yarn presite", + "deploy:site": "gh-pages -d site", + data: "rsync -r node_modules/vega-datasets/data/* site/data", + schema: "mkdir -p build && ts-json-schema-generator -f tsconfig.json -p src/index.ts -t TopLevelSpec --no-type-check --no-ref-encode > build/vega-lite-schema.json && yarn renameschema && cp build/vega-lite-schema.json site/_data/", + renameschema: "scripts/rename-schema.sh", + presite: "yarn data && yarn schema && yarn build:site && yarn build:versions && scripts/create-example-pages.sh", + site: "yarn site:only", + "site:only": "pushd site && bundle exec jekyll serve -I -l && popd", + prettierbase: "prettier '**/*.{md,css,yml}'", + eslintbase: "eslint .", + format: "yarn eslintbase --fix && yarn prettierbase --write", + lint: "yarn eslintbase && yarn prettierbase --check", + jest: "NODE_OPTIONS=--experimental-vm-modules npx jest", + test: "yarn jest test/ && yarn lint && yarn schema && yarn jest examples/ && yarn test:runtime", + "test:cover": "yarn jest --collectCoverage test/", + "test:inspect": "node --inspect-brk --experimental-vm-modules ./node_modules/.bin/jest --runInBand test", + "test:runtime": "NODE_OPTIONS=--experimental-vm-modules TZ=America/Los_Angeles npx jest test-runtime/ --config test-runtime/jest-config.json", + "test:runtime:generate": "yarn build:only && del-cli test-runtime/resources && VL_GENERATE_TESTS=true yarn test:runtime", + watch: "tsc -p tsconfig.build.json -w", + "watch:site": "yarn build:site -w", + "watch:test": "yarn jest --watch test/" + }; + var repository = { + type: "git", + url: "https://github.com/vega/vega-lite.git" + }; + var license = "BSD-3-Clause"; + var bugs = { + url: "https://github.com/vega/vega-lite/issues" + }; + var devDependencies = { + "@babel/core": "^7.15.0", + "@babel/preset-env": "^7.15.0", + "@babel/preset-typescript": "^7.15.0", + "@rollup/plugin-babel": "^5.3.0", + "@rollup/plugin-commonjs": "^20.0.0", + "@rollup/plugin-json": "^4.1.0", + "@rollup/plugin-node-resolve": "^13.0.4", + "@types/chai": "^4.2.21", + "@types/d3": "^7.0.0", + "@types/jest": "^27.0.1", + "@types/mkdirp": "^1.0.2", + "@types/pako": "^1.0.2", + "@typescript-eslint/eslint-plugin": "^4.29.2", + "@typescript-eslint/parser": "^4.29.2", + ajv: "^8.6.2", + "ajv-formats": "^2.1.1", + chai: "^4.3.4", + cheerio: "^1.0.0-rc.10", + "conventional-changelog-cli": "^2.1.1", + d3: "^7.0.1", + "del-cli": "^4.0.1", + eslint: "^7.32.0", + "eslint-config-prettier": "^8.3.0", + "eslint-plugin-jest": "^24.4.0", + "eslint-plugin-prettier": "^4.0.0", + "gh-pages": "^3.2.3", + "highlight.js": "^11.2.0", + jest: "^27.0.6", + "jest-dev-server": "^5.0.3", + mkdirp: "^1.0.4", + pako: "^2.0.4", + prettier: "^2.3.2", + puppeteer: "^10.2.0", + rollup: "^2.56.3", + "rollup-plugin-bundle-size": "^1.0.3", + "rollup-plugin-sourcemaps": "^0.6.3", + "rollup-plugin-terser": "^7.0.2", + serve: "^12.0.0", + terser: "^5.7.1", + "ts-jest": "^27.0.5", + "ts-json-schema-generator": "^0.95.0", + typescript: "~4.4.2", + "vega-cli": "^5.20.2", + "vega-datasets": "~2.2.0", + "vega-embed": "^6.18.2", + "vega-tooltip": "^0.27.0", + "yaml-front-matter": "^4.1.1" + }; + var dependencies = { + "@types/clone": "~2.1.1", + "array-flat-polyfill": "^1.0.1", + clone: "~2.1.2", + "fast-deep-equal": "~3.1.3", + "fast-json-stable-stringify": "~2.1.0", + "json-stringify-pretty-compact": "~3.0.0", + tslib: "~2.3.1", + "vega-event-selector": "~2.0.6", + "vega-expression": "~4.0.1", + 
"vega-util": "~1.16.1", + yargs: "~17.1.1" + }; + var peerDependencies = { + vega: "^5.20.2" + }; + var engines = { + node: ">=12" + }; + var pkg = { + name: name, + author: author, + version: version$1, + collaborators: collaborators, + homepage: homepage, + description: description$1, + main: main$1, + unpkg: unpkg, + jsdelivr: jsdelivr, + module: module, + types: types, + bin: bin, + directories: directories, + files: files, + scripts: scripts, + repository: repository, + license: license, + bugs: bugs, + devDependencies: devDependencies, + dependencies: dependencies, + peerDependencies: peerDependencies, + engines: engines + }; - function error(message) { - throw Error(message); - } - - function splitAccessPath(p) { - const path = [], - n = p.length; - let q = null, - b = 0, - s = '', - i, - j, - c; - p = p + ''; - - function push() { - path.push(s + p.substring(i, j)); - s = ''; - i = j + 1; - } - - for (i = j = 0; j < n; ++j) { - c = p[j]; - - if (c === '\\') { - s += p.substring(i, j); - s += p.substring(++j, ++j); - i = j; - } else if (c === q) { - push(); - q = null; - b = -1; - } else if (q) { - continue; - } else if (i === b && c === '"') { - i = j + 1; - q = c; - } else if (i === b && c === "'") { - i = j + 1; - q = c; - } else if (c === '.' && !b) { - if (j > i) { - push(); - } else { - i = j + 1; - } - } else if (c === '[') { - if (j > i) push(); - b = i = j + 1; - } else if (c === ']') { - if (!b) error('Access path missing open bracket: ' + p); - if (b > 0) push(); - b = 0; - i = j + 1; - } - } - - if (b) error('Access path missing closing bracket: ' + p); - if (q) error('Access path missing closing quote: ' + p); - - if (j > i) { - j++; - push(); - } - - return path; - } - - function field(field, name, opt) { - const path = splitAccessPath(field); - field = path.length === 1 ? 
path[0] : field; - return accessor((opt && opt.get || getter)(path), [field], name || field); - } - - const id = field('id'); - const identity = accessor(_ => _, [], 'identity'); - const zero = accessor(() => 0, [], 'zero'); - const one = accessor(() => 1, [], 'one'); - const truthy = accessor(() => true, [], 'true'); - const falsy = accessor(() => false, [], 'false'); - - function log(method, level, input) { - const args = [level].concat([].slice.call(input)); - console[method].apply(console, args); // eslint-disable-line no-console - } - - const None = 0; - const Error$1 = 1; - const Warn = 2; - const Info = 3; - const Debug = 4; - - function logger(_, method) { - let level = _ || None; - return { - level(_) { - if (arguments.length) { - level = +_; - return this; - } else { - return level; - } - }, - - error() { - if (level >= Error$1) log(method || 'error', 'ERROR', arguments); - return this; - }, - - warn() { - if (level >= Warn) log(method || 'warn', 'WARN', arguments); - return this; - }, - - info() { - if (level >= Info) log(method || 'log', 'INFO', arguments); - return this; - }, - - debug() { - if (level >= Debug) log(method || 'log', 'DEBUG', arguments); - return this; - } - - }; - } - - var isArray = Array.isArray; - - function isObject(_) { - return _ === Object(_); - } - - const isLegalKey = key => key !== '__proto__'; - - function mergeConfig(...configs) { - return configs.reduce((out, source) => { - for (const key in source) { - if (key === 'signals') { - // for signals, we merge the signals arrays - // source signals take precedence over - // existing signals with the same name - out.signals = mergeNamed(out.signals, source.signals); - } else { - // otherwise, merge objects subject to recursion constraints - // for legend block, recurse for the layout entry only - // for style block, recurse for all properties - // otherwise, no recursion: objects overwrite, no merging - const r = key === 'legend' ? { - layout: 1 - } : key === 'style' ? true : null; - writeConfig(out, key, source[key], r); - } - } - - return out; - }, {}); - } - - function writeConfig(output, key, value, recurse) { - if (!isLegalKey(key)) return; - let k, o; - - if (isObject(value) && !isArray(value)) { - o = isObject(output[key]) ? output[key] : output[key] = {}; - - for (k in value) { - if (recurse && (recurse === true || recurse[k])) { - writeConfig(o, k, value[k]); - } else if (isLegalKey(k)) { - o[k] = value[k]; - } - } - } else { - output[key] = value; - } - } - - function mergeNamed(a, b) { - if (a == null) return b; - const map = {}, - out = []; - - function add(_) { - if (!map[_.name]) { - map[_.name] = 1; - out.push(_); - } - } - - b.forEach(add); - a.forEach(add); - return out; - } - - function array(_) { - return _ != null ? isArray(_) ? _ : [_] : []; - } - - function isFunction(_) { - return typeof _ === 'function'; - } - - const hop = Object.prototype.hasOwnProperty; - - function has(object, property) { - return hop.call(object, property); - } - - function isBoolean(_) { - return typeof _ === 'boolean'; - } - - function isNumber(_) { - return typeof _ === 'number'; - } - - function isString(_) { - return typeof _ === 'string'; - } - - function $(x) { - return isArray(x) ? '[' + x.map($) + ']' : isObject(x) || isString(x) ? // Output valid JSON and JS source strings. 
- // See http://timelessrepo.com/json-isnt-a-javascript-subset - JSON.stringify(x).replace('\u2028', '\\u2028').replace('\u2029', '\\u2029') : x; - } - - function toSet(_) { - const s = {}, - n = _.length; - - for (let i = 0; i < n; ++i) s[_[i]] = true; - - return s; - } - Array.prototype.flat||Object.defineProperty(Array.prototype,"flat",{configurable:!0,value:function r(){var t=isNaN(arguments[0])?1:Number(arguments[0]);return t?Array.prototype.reduce.call(this,function(a,e){return Array.isArray(e)?a.push.apply(a,r.call(e,t-1)):a.push(e),a},[]):Array.prototype.slice.call(this)},writable:!0}),Array.prototype.flatMap||Object.defineProperty(Array.prototype,"flatMap",{configurable:!0,value:function(r){return Array.prototype.map.apply(this,arguments).flat()},writable:!0}); - function createCommonjsModule(fn, basedir, module) { - return module = { - path: basedir, - exports: {}, - require: function (path, base) { - return commonjsRequire(path, (base === undefined || base === null) ? module.path : base); - } - }, fn(module, module.exports), module.exports; - } + var clone = {exports: {}}; - function commonjsRequire () { - throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs'); - } - - var clone_1 = createCommonjsModule(function (module) { + (function (module) { var clone = function () { function _instanceof(obj, type) { return type != null && obj instanceof type; } @@ -549,15 +445,17 @@ clone.__getRegExpFlags = __getRegExpFlags; return clone; }(); - if ( module.exports) { + if (module.exports) { module.exports = clone; } - }); + })(clone); + var clone_ = clone.exports; + var fastDeepEqual = function equal(a, b) { if (a === b) return true; if (a && b && typeof a == 'object' && typeof b == 'object') { if (a.constructor !== b.constructor) return false; @@ -703,11 +601,11 @@ return normalizer(op); } } const deepEqual = fastDeepEqual; - const duplicate = clone_1; + const duplicate = clone_; /** * Creates an object composed of the picked object properties. * * var object = {'a': 1, 'b': '2', 'c': 3}; * pick(object, ['a', 'c']); @@ -717,11 +615,11 @@ function pick(obj, props) { const copy = {}; for (const prop of props) { - if (has(obj, prop)) { + if (vegaUtil.hasOwnProperty(obj, prop)) { copy[prop] = obj[prop]; } } return copy; @@ -745,11 +643,11 @@ /** * Monkey patch Set so that `stringify` produces a string representation of sets. */ Set.prototype['toJSON'] = function () { - return "Set(".concat([...this].map(x => fastJsonStableStringify(x)).join(','), ")"); + return `Set(${[...this].map(x => fastJsonStableStringify(x)).join(',')})`; }; /** * Converts any object to a string representation that can be consumed by humans. */ @@ -758,15 +656,15 @@ /** * Converts any object to a string of limited size, or a number. */ function hash(a) { - if (isNumber(a)) { + if (vegaUtil.isNumber(a)) { return a; } - const str = isString(a) ? a : fastJsonStableStringify(a); // short strings can be used as hash directly, longer strings are hashed to reduce memory usage + const str = vegaUtil.isString(a) ? 
a : fastJsonStableStringify(a); // short strings can be used as hash directly, longer strings are hashed to reduce memory usage if (str.length < 250) { return str; } // from http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/ @@ -783,11 +681,11 @@ } function isNullOrFalse(x) { return x === false || x === null; } function contains(array, item) { - return array.indexOf(item) > -1; + return array.includes(item); } /** * Returns true if any item returns true. */ @@ -824,19 +722,19 @@ /** * recursively merges src into dest */ function mergeDeep(dest, ...src) { for (const s of src) { - deepMerge_(dest, s !== null && s !== void 0 ? s : {}); + deepMerge_(dest, s ?? {}); } return dest; } function deepMerge_(dest, src) { for (const property of keys(src)) { - writeConfig(dest, property, src[property], true); + vegaUtil.writeConfig(dest, property, src[property], true); } } function unique(values, f) { const results = []; @@ -900,13 +798,13 @@ } function prefixGenerator(a) { const prefixes = new Set(); for (const x of a) { - const splitField = splitAccessPath(x); // Wrap every element other than the first in `[]` + const splitField = vegaUtil.splitAccessPath(x); // Wrap every element other than the first in `[]` - const wrappedWithAccessors = splitField.map((y, i) => i === 0 ? y : "[".concat(y, "]")); + const wrappedWithAccessors = splitField.map((y, i) => i === 0 ? y : `[${y}]`); const computedPrefixes = wrappedWithAccessors.map((_, i) => wrappedWithAccessors.slice(0, i + 1).join('')); for (const y of computedPrefixes) { prefixes.add(y); } @@ -931,14 +829,14 @@ return keys(obj).length === 0; } // This is a stricter version of Object.keys but with better types. See https://github.com/Microsoft/TypeScript/pull/12253#issuecomment-263132208 const keys = Object.keys; const vals = Object.values; - const entries = Object.entries; // Using mapped type to declare a collect of flags for a string literal type S + const entries$1 = Object.entries; // Using mapped type to declare a collect of flags for a string literal type S // https://www.typescriptlang.org/docs/handbook/advanced-types.html#mapped-types - function isBoolean$1(b) { + function isBoolean(b) { return b === true || b === false; } /** * Convert a string into a valid variable name */ @@ -949,15 +847,15 @@ return (s.match(/^\d+/) ? '_' : '') + alphanumericS; } function logicalExpr(op, cb) { if (isLogicalNot(op)) { - return '!(' + logicalExpr(op.not, cb) + ')'; + return `!(${logicalExpr(op.not, cb)})`; } else if (isLogicalAnd(op)) { - return '(' + op.and.map(and => logicalExpr(and, cb)).join(') && (') + ')'; + return `(${op.and.map(and => logicalExpr(and, cb)).join(') && (')})`; } else if (isLogicalOr(op)) { - return '(' + op.or.map(or => logicalExpr(or, cb)).join(') || (') + ')'; + return `(${op.or.map(or => logicalExpr(or, cb)).join(') || (')})`; } else { return cb(op); } } /** @@ -985,16 +883,16 @@ * @param path The field name. * @param datum The string to use for `datum`. */ function accessPathWithDatum(path, datum = 'datum') { - const pieces = splitAccessPath(path); + const pieces = vegaUtil.splitAccessPath(path); const prefixes = []; for (let i = 1; i <= pieces.length; i++) { - const prefix = "[".concat(pieces.slice(0, i).map($).join(']['), "]"); - prefixes.push("".concat(datum).concat(prefix)); + const prefix = `[${pieces.slice(0, i).map(vegaUtil.stringValue).join('][')}]`; + prefixes.push(`${datum}${prefix}`); } return prefixes.join(' && '); } /** @@ -1003,11 +901,11 @@ * @param path The field name. 
* @param datum The string to use for `datum`. */ function flatAccessWithDatum(path, datum = 'datum') { - return "".concat(datum, "[").concat($(splitAccessPath(path).join('.')), "]"); + return `${datum}[${vegaUtil.stringValue(vegaUtil.splitAccessPath(path).join('.'))}]`; } function escapePathAccess(string) { return string.replace(/(\[|\]|\.|'|")/g, '\\$1'); } @@ -1016,11 +914,11 @@ * For example, `foo["bar"].baz` becomes `foo\\.bar\\.baz`. */ function replacePathInField(path) { - return "".concat(splitAccessPath(path).map(escapePathAccess).join('\\.')); + return `${vegaUtil.splitAccessPath(path).map(escapePathAccess).join('\\.')}`; } /** * Replace all occurrences of a string with another string. * * @param string the string to replace in @@ -1035,22 +933,22 @@ * Remove path accesses with access from field. * For example, `foo["bar"].baz` becomes `foo.bar.baz`. */ function removePathFromField(path) { - return "".concat(splitAccessPath(path).join('.')); + return `${vegaUtil.splitAccessPath(path).join('.')}`; } /** * Count the depth of the path. Returns 1 for fields that are not nested. */ function accessPathDepth(path) { if (!path) { return 0; } - return splitAccessPath(path).length; + return vegaUtil.splitAccessPath(path).length; } /** * This is a replacement for chained || for numeric properties or properties that respect null so that 0 will be included. */ @@ -1081,14 +979,14 @@ function resetIdCounter() { idCounter = 42; } function internalField(name) { - return isInternalField(name) ? name : "__".concat(name); + return isInternalField(name) ? name : `__${name}`; } function isInternalField(name) { - return name.indexOf('__') === 0; + return name.startsWith('__'); } /** * Normalize angle to be within [0,360). */ @@ -1102,359 +1000,17 @@ /** * Returns whether the passed in value is a valid number. */ function isNumeric(value) { - if (isNumber(value)) { + if (vegaUtil.isNumber(value)) { return true; } return !isNaN(value) && !isNaN(parseFloat(value)); } - const CONDITIONAL_AXIS_PROP_INDEX = { - labelAlign: { - part: 'labels', - vgProp: 'align' - }, - labelBaseline: { - part: 'labels', - vgProp: 'baseline' - }, - labelColor: { - part: 'labels', - vgProp: 'fill' - }, - labelFont: { - part: 'labels', - vgProp: 'font' - }, - labelFontSize: { - part: 'labels', - vgProp: 'fontSize' - }, - labelFontStyle: { - part: 'labels', - vgProp: 'fontStyle' - }, - labelFontWeight: { - part: 'labels', - vgProp: 'fontWeight' - }, - labelOpacity: { - part: 'labels', - vgProp: 'opacity' - }, - labelOffset: null, - labelPadding: null, - // There is no fixed vgProp for tickSize, need to use signal. - gridColor: { - part: 'grid', - vgProp: 'stroke' - }, - gridDash: { - part: 'grid', - vgProp: 'strokeDash' - }, - gridDashOffset: { - part: 'grid', - vgProp: 'strokeDashOffset' - }, - gridOpacity: { - part: 'grid', - vgProp: 'opacity' - }, - gridWidth: { - part: 'grid', - vgProp: 'strokeWidth' - }, - tickColor: { - part: 'ticks', - vgProp: 'stroke' - }, - tickDash: { - part: 'ticks', - vgProp: 'strokeDash' - }, - tickDashOffset: { - part: 'ticks', - vgProp: 'strokeDashOffset' - }, - tickOpacity: { - part: 'ticks', - vgProp: 'opacity' - }, - tickSize: null, - // There is no fixed vgProp for tickSize, need to use signal. 
- tickWidth: { - part: 'ticks', - vgProp: 'strokeWidth' - } - }; - function isConditionalAxisValue(v) { - return v && v['condition']; - } - const AXIS_PARTS = ['domain', 'grid', 'labels', 'ticks', 'title']; - /** - * A dictionary listing whether a certain axis property is applicable for only main axes or only grid axes. - */ - - const AXIS_PROPERTY_TYPE = { - grid: 'grid', - gridCap: 'grid', - gridColor: 'grid', - gridDash: 'grid', - gridDashOffset: 'grid', - gridOpacity: 'grid', - gridScale: 'grid', - gridWidth: 'grid', - orient: 'main', - bandPosition: 'both', - // Need to be applied to grid axis too, so the grid will align with ticks. - aria: 'main', - description: 'main', - domain: 'main', - domainCap: 'main', - domainColor: 'main', - domainDash: 'main', - domainDashOffset: 'main', - domainOpacity: 'main', - domainWidth: 'main', - format: 'main', - formatType: 'main', - labelAlign: 'main', - labelAngle: 'main', - labelBaseline: 'main', - labelBound: 'main', - labelColor: 'main', - labelFlush: 'main', - labelFlushOffset: 'main', - labelFont: 'main', - labelFontSize: 'main', - labelFontStyle: 'main', - labelFontWeight: 'main', - labelLimit: 'main', - labelLineHeight: 'main', - labelOffset: 'main', - labelOpacity: 'main', - labelOverlap: 'main', - labelPadding: 'main', - labels: 'main', - labelSeparation: 'main', - maxExtent: 'main', - minExtent: 'main', - offset: 'both', - position: 'main', - tickCap: 'main', - tickColor: 'main', - tickDash: 'main', - tickDashOffset: 'main', - tickMinStep: 'main', - tickOffset: 'both', - // Need to be applied to grid axis too, so the grid will align with ticks. - tickOpacity: 'main', - tickRound: 'both', - // Apply rounding to grid and ticks so they are aligned. - ticks: 'main', - tickSize: 'main', - tickWidth: 'both', - title: 'main', - titleAlign: 'main', - titleAnchor: 'main', - titleAngle: 'main', - titleBaseline: 'main', - titleColor: 'main', - titleFont: 'main', - titleFontSize: 'main', - titleFontStyle: 'main', - titleFontWeight: 'main', - titleLimit: 'main', - titleLineHeight: 'main', - titleOpacity: 'main', - titlePadding: 'main', - titleX: 'main', - titleY: 'main', - encode: 'both', - // we hide this in Vega-Lite - scale: 'both', - tickBand: 'both', - tickCount: 'both', - tickExtra: 'both', - translate: 'both', - values: 'both', - zindex: 'both' // this is actually set afterward, so it doesn't matter - - }; - const COMMON_AXIS_PROPERTIES_INDEX = { - orient: 1, - // other things can depend on orient - aria: 1, - bandPosition: 1, - description: 1, - domain: 1, - domainCap: 1, - domainColor: 1, - domainDash: 1, - domainDashOffset: 1, - domainOpacity: 1, - domainWidth: 1, - format: 1, - formatType: 1, - grid: 1, - gridCap: 1, - gridColor: 1, - gridDash: 1, - gridDashOffset: 1, - gridOpacity: 1, - gridWidth: 1, - labelAlign: 1, - labelAngle: 1, - labelBaseline: 1, - labelBound: 1, - labelColor: 1, - labelFlush: 1, - labelFlushOffset: 1, - labelFont: 1, - labelFontSize: 1, - labelFontStyle: 1, - labelFontWeight: 1, - labelLimit: 1, - labelLineHeight: 1, - labelOffset: 1, - labelOpacity: 1, - labelOverlap: 1, - labelPadding: 1, - labels: 1, - labelSeparation: 1, - maxExtent: 1, - minExtent: 1, - offset: 1, - position: 1, - tickBand: 1, - tickCap: 1, - tickColor: 1, - tickCount: 1, - tickDash: 1, - tickDashOffset: 1, - tickExtra: 1, - tickMinStep: 1, - tickOffset: 1, - tickOpacity: 1, - tickRound: 1, - ticks: 1, - tickSize: 1, - tickWidth: 1, - title: 1, - titleAlign: 1, - titleAnchor: 1, - titleAngle: 1, - titleBaseline: 1, - titleColor: 1, - 
titleFont: 1, - titleFontSize: 1, - titleFontStyle: 1, - titleFontWeight: 1, - titleLimit: 1, - titleLineHeight: 1, - titleOpacity: 1, - titlePadding: 1, - titleX: 1, - titleY: 1, - translate: 1, - values: 1, - zindex: 1 - }; - const AXIS_PROPERTIES_INDEX = { ...COMMON_AXIS_PROPERTIES_INDEX, - style: 1, - labelExpr: 1, - encoding: 1 - }; - function isAxisProperty(prop) { - return !!AXIS_PROPERTIES_INDEX[prop]; - } // Export for dependent projects - const AXIS_CONFIGS_INDEX = { - axis: 1, - axisBand: 1, - axisBottom: 1, - axisDiscrete: 1, - axisLeft: 1, - axisPoint: 1, - axisQuantitative: 1, - axisRight: 1, - axisTemporal: 1, - axisTop: 1, - axisX: 1, - axisXBand: 1, - axisXDiscrete: 1, - axisXPoint: 1, - axisXQuantitative: 1, - axisXTemporal: 1, - axisY: 1, - axisYBand: 1, - axisYDiscrete: 1, - axisYPoint: 1, - axisYQuantitative: 1, - axisYTemporal: 1 - }; - const AXIS_CONFIGS = keys(AXIS_CONFIGS_INDEX); - - const AGGREGATE_OP_INDEX = { - argmax: 1, - argmin: 1, - average: 1, - count: 1, - distinct: 1, - product: 1, - max: 1, - mean: 1, - median: 1, - min: 1, - missing: 1, - q1: 1, - q3: 1, - ci0: 1, - ci1: 1, - stderr: 1, - stdev: 1, - stdevp: 1, - sum: 1, - valid: 1, - values: 1, - variance: 1, - variancep: 1 - }; - const MULTIDOMAIN_SORT_OP_INDEX = { - count: 1, - min: 1, - max: 1 - }; - function isArgminDef(a) { - return !!a && !!a['argmin']; - } - function isArgmaxDef(a) { - return !!a && !!a['argmax']; - } - function isAggregateOp(a) { - return isString(a) && !!AGGREGATE_OP_INDEX[a]; - } - const COUNTING_OPS = ['count', 'valid', 'missing', 'distinct']; - function isCountingAggregateOp(aggregate) { - return isString(aggregate) && contains(COUNTING_OPS, aggregate); - } - function isMinMaxOp(aggregate) { - return isString(aggregate) && contains(['min', 'max'], aggregate); - } - /** Additive-based aggregation operations. These can be applied to stack. */ - - const SUM_OPS = ['count', 'sum', 'distinct', 'valid', 'missing']; - /** - * Aggregation operators that always produce values within the range [domainMin, domainMax]. - */ - - const SHARED_DOMAIN_OPS = ['mean', 'average', 'median', 'q1', 'q3', 'min', 'max']; - const SHARED_DOMAIN_OP_INDEX = toSet(SHARED_DOMAIN_OPS); - /* * Constants and utilities for encoding channels (Visual variables) * such as 'x', 'y', 'color'. 
*/ // Facet @@ -1487,11 +1043,11 @@ const FILLOPACITY = 'fillOpacity'; const STROKEOPACITY = 'strokeOpacity'; const STROKEWIDTH = 'strokeWidth'; const STROKEDASH = 'strokeDash'; // Non-scale channel - const TEXT = 'text'; + const TEXT$1 = 'text'; const ORDER = 'order'; const DETAIL = 'detail'; const KEY = 'key'; const TOOLTIP = 'tooltip'; const HREF = 'href'; @@ -1670,11 +1226,11 @@ return RADIUS2; } return undefined; } - function getSizeChannel(channel) { + function getSizeChannel$1(channel) { switch (channel) { case X: case X2: return 'width'; @@ -1921,11 +1477,11 @@ return { point: 'always', geoshape: 'always' }; - case TEXT: + case TEXT$1: return { text: 'always' }; case ANGLE: @@ -1983,11 +1539,11 @@ case ROW: case COLUMN: case SHAPE: case STROKEDASH: // TEXT, TOOLTIP, URL, and HREF have no scale but have discrete output [falls through] - case TEXT: + case TEXT$1: case TOOLTIP: case HREF: case URL: case DESCRIPTION: return 'discrete'; @@ -2008,19 +1564,75 @@ case ORDER: return undefined; } } + const AGGREGATE_OP_INDEX = { + argmax: 1, + argmin: 1, + average: 1, + count: 1, + distinct: 1, + product: 1, + max: 1, + mean: 1, + median: 1, + min: 1, + missing: 1, + q1: 1, + q3: 1, + ci0: 1, + ci1: 1, + stderr: 1, + stdev: 1, + stdevp: 1, + sum: 1, + valid: 1, + values: 1, + variance: 1, + variancep: 1 + }; + const MULTIDOMAIN_SORT_OP_INDEX = { + count: 1, + min: 1, + max: 1 + }; + function isArgminDef(a) { + return !!a && !!a['argmin']; + } + function isArgmaxDef(a) { + return !!a && !!a['argmax']; + } + function isAggregateOp(a) { + return vegaUtil.isString(a) && !!AGGREGATE_OP_INDEX[a]; + } + const COUNTING_OPS = ['count', 'valid', 'missing', 'distinct']; + function isCountingAggregateOp(aggregate) { + return vegaUtil.isString(aggregate) && contains(COUNTING_OPS, aggregate); + } + function isMinMaxOp(aggregate) { + return vegaUtil.isString(aggregate) && contains(['min', 'max'], aggregate); + } + /** Additive-based aggregation operations. These can be applied to stack. */ + + const SUM_OPS = ['count', 'sum', 'distinct', 'valid', 'missing']; /** + * Aggregation operators that always produce values within the range [domainMin, domainMax]. + */ + + const SHARED_DOMAIN_OPS = ['mean', 'average', 'median', 'q1', 'q3', 'min', 'max']; + const SHARED_DOMAIN_OP_INDEX = vegaUtil.toSet(SHARED_DOMAIN_OPS); + + /** * Create a key for the bin configuration. Not for prebinned bin. */ function binToString(bin) { - if (isBoolean(bin)) { + if (vegaUtil.isBoolean(bin)) { bin = normalizeBin(bin, undefined); } - return 'bin' + keys(bin).map(p => isSelectionExtent(bin[p]) ? varName("_".concat(p, "_").concat(entries(bin[p]))) : varName("_".concat(p, "_").concat(bin[p]))).join(''); + return 'bin' + keys(bin).map(p => isParameterExtent(bin[p]) ? varName(`_${p}_${entries$1(bin[p])}`) : varName(`_${p}_${bin[p]}`)).join(''); } /** * Vega-Lite should bin the data. */ @@ -2033,14 +1645,14 @@ function isBinned(bin) { return bin === 'binned' || isBinParams(bin) && bin.binned === true; } function isBinParams(bin) { - return isObject(bin); + return vegaUtil.isObject(bin); } - function isSelectionExtent(extent) { - return extent === null || extent === void 0 ? void 0 : extent['selection']; + function isParameterExtent(extent) { + return extent === null || extent === void 0 ? 
void 0 : extent['param']; } function autoMaxBins(channel) { switch (channel) { case ROW: case COLUMN: @@ -2065,10 +1677,426 @@ default: return 10; } } + function isExprRef(o) { + return o && !!o['expr']; + } + function replaceExprRef(index) { + const props = keys(index || {}); + const newIndex = {}; + + for (const prop of props) { + newIndex[prop] = signalRefOrValue(index[prop]); + } + + return newIndex; + } + + function extractTitleConfig(titleConfig) { + const { + // These are non-mark title config that need to be hardcoded + anchor, + frame, + offset, + orient, + angle, + limit, + // color needs to be redirect to fill + color, + // subtitle properties + subtitleColor, + subtitleFont, + subtitleFontSize, + subtitleFontStyle, + subtitleFontWeight, + subtitleLineHeight, + subtitlePadding, + // The rest are mark config. + ...rest + } = titleConfig; + const titleMarkConfig = { ...rest, + ...(color ? { + fill: color + } : {}) + }; // These are non-mark title config that need to be hardcoded + + const nonMarkTitleProperties = { ...(anchor ? { + anchor + } : {}), + ...(frame ? { + frame + } : {}), + ...(offset ? { + offset + } : {}), + ...(orient ? { + orient + } : {}), + ...(angle !== undefined ? { + angle + } : {}), + ...(limit !== undefined ? { + limit + } : {}) + }; // subtitle part can stay in config.title since header titles do not use subtitle + + const subtitle = { ...(subtitleColor ? { + subtitleColor + } : {}), + ...(subtitleFont ? { + subtitleFont + } : {}), + ...(subtitleFontSize ? { + subtitleFontSize + } : {}), + ...(subtitleFontStyle ? { + subtitleFontStyle + } : {}), + ...(subtitleFontWeight ? { + subtitleFontWeight + } : {}), + ...(subtitleLineHeight ? { + subtitleLineHeight + } : {}), + ...(subtitlePadding ? { + subtitlePadding + } : {}) + }; + const subtitleMarkConfig = pick(titleConfig, ['align', 'baseline', 'dx', 'dy', 'limit']); + return { + titleMarkConfig, + subtitleMarkConfig, + nonMarkTitleProperties: nonMarkTitleProperties, + subtitle + }; + } + function isText(v) { + return vegaUtil.isString(v) || vegaUtil.isArray(v) && vegaUtil.isString(v[0]); + } + + function isSignalRef(o) { + return o && !!o['signal']; + } // TODO: add type of value (Make it VgValueRef<V extends ValueOrGradient> {value?:V ...}) + + function isVgRangeStep(range) { + return !!range['step']; + } + function isDataRefUnionedDomain(domain) { + if (!vegaUtil.isArray(domain)) { + return 'fields' in domain && !('data' in domain); + } + + return false; + } + function isFieldRefUnionDomain(domain) { + if (!vegaUtil.isArray(domain)) { + return 'fields' in domain && 'data' in domain; + } + + return false; + } + function isDataRefDomain(domain) { + if (!vegaUtil.isArray(domain)) { + return 'field' in domain && 'data' in domain; + } + + return false; + } + const VG_MARK_CONFIG_INDEX = { + aria: 1, + description: 1, + ariaRole: 1, + ariaRoleDescription: 1, + blend: 1, + opacity: 1, + fill: 1, + fillOpacity: 1, + stroke: 1, + strokeCap: 1, + strokeWidth: 1, + strokeOpacity: 1, + strokeDash: 1, + strokeDashOffset: 1, + strokeJoin: 1, + strokeOffset: 1, + strokeMiterLimit: 1, + startAngle: 1, + endAngle: 1, + padAngle: 1, + innerRadius: 1, + outerRadius: 1, + size: 1, + shape: 1, + interpolate: 1, + tension: 1, + orient: 1, + align: 1, + baseline: 1, + text: 1, + dir: 1, + dx: 1, + dy: 1, + ellipsis: 1, + limit: 1, + radius: 1, + theta: 1, + angle: 1, + font: 1, + fontSize: 1, + fontWeight: 1, + fontStyle: 1, + lineBreak: 1, + lineHeight: 1, + cursor: 1, + href: 1, + tooltip: 1, + cornerRadius: 1, + 
cornerRadiusTopLeft: 1, + cornerRadiusTopRight: 1, + cornerRadiusBottomLeft: 1, + cornerRadiusBottomRight: 1, + aspect: 1, + width: 1, + height: 1, + url: 1, + smooth: 1 // commented below are vg channel that do not have mark config. + // x: 1, + // y: 1, + // x2: 1, + // y2: 1, + // xc'|'yc' + // clip: 1, + // path: 1, + // url: 1, + + }; + const VG_MARK_CONFIGS = keys(VG_MARK_CONFIG_INDEX); + const VG_MARK_INDEX = { + arc: 1, + area: 1, + group: 1, + image: 1, + line: 1, + path: 1, + rect: 1, + rule: 1, + shape: 1, + symbol: 1, + text: 1, + trail: 1 + }; // Vega's cornerRadius channels. + + const VG_CORNERRADIUS_CHANNELS = ['cornerRadius', 'cornerRadiusTopLeft', 'cornerRadiusTopRight', 'cornerRadiusBottomLeft', 'cornerRadiusBottomRight']; + + function signalOrValueRefWithCondition(val) { + const condition = vegaUtil.isArray(val.condition) ? val.condition.map(conditionalSignalRefOrValue) : conditionalSignalRefOrValue(val.condition); + return { ...signalRefOrValue(val), + condition + }; + } + function signalRefOrValue(value) { + if (isExprRef(value)) { + const { + expr, + ...rest + } = value; + return { + signal: expr, + ...rest + }; + } + + return value; + } + function conditionalSignalRefOrValue(value) { + if (isExprRef(value)) { + const { + expr, + ...rest + } = value; + return { + signal: expr, + ...rest + }; + } + + return value; + } + function signalOrValueRef(value) { + if (isExprRef(value)) { + const { + expr, + ...rest + } = value; + return { + signal: expr, + ...rest + }; + } + + if (isSignalRef(value)) { + return value; + } + + return value !== undefined ? { + value + } : undefined; + } + function exprFromValueOrSignalRef(ref) { + if (isSignalRef(ref)) { + return ref.signal; + } + + return vegaUtil.stringValue(ref.value); + } + function signalOrStringValue(v) { + if (isSignalRef(v)) { + return v.signal; + } + + return v == null ? null : vegaUtil.stringValue(v); + } + function applyMarkConfig(e, model, propsList) { + for (const property of propsList) { + const value = getMarkConfig(property, model.markDef, model.config); + + if (value !== undefined) { + e[property] = signalOrValueRef(value); + } + } + + return e; + } + function getStyles(mark) { + return [].concat(mark.type, mark.style ?? []); + } + function getMarkPropOrConfig(channel, mark, config, opt = {}) { + const { + vgChannel, + ignoreVgConfig + } = opt; + + if (vgChannel && mark[vgChannel] !== undefined) { + return mark[vgChannel]; + } else if (mark[channel] !== undefined) { + return mark[channel]; + } else if (ignoreVgConfig && (!vgChannel || vgChannel === channel)) { + return undefined; + } + + return getMarkConfig(channel, mark, config, opt); + } + /** + * Return property value from style or mark specific config property if exists. + * Otherwise, return general mark specific config. + */ + + function getMarkConfig(channel, mark, config, { + vgChannel + } = {}) { + return getFirstDefined( // style config has highest precedence + vgChannel ? getMarkStyleConfig(channel, mark, config.style) : undefined, getMarkStyleConfig(channel, mark, config.style), // then mark-specific config + vgChannel ? config[mark.type][vgChannel] : undefined, config[mark.type][channel], // Need to cast because MarkDef doesn't perfectly match with AnyMarkConfig, but if the type isn't available, we'll get nothing here, which is fine + // If there is vgChannel, skip vl channel. + // For example, vl size for text is vg fontSize, but config.mark.size is only for point size. + vgChannel ? 
config.mark[vgChannel] : config.mark[channel] // Need to cast for the same reason as above + ); + } + function getMarkStyleConfig(prop, mark, styleConfigIndex) { + return getStyleConfig(prop, getStyles(mark), styleConfigIndex); + } + function getStyleConfig(p, styles, styleConfigIndex) { + styles = vegaUtil.array(styles); + let value; + + for (const style of styles) { + const styleConfig = styleConfigIndex[style]; + + if (styleConfig && styleConfig[p] !== undefined) { + value = styleConfig[p]; + } + } + + return value; + } + /** + * Return Vega sort parameters (tuple of field and order). + */ + + function sortParams(orderDef, fieldRefOption) { + return vegaUtil.array(orderDef).reduce((s, orderChannelDef) => { + s.field.push(vgField(orderChannelDef, fieldRefOption)); + s.order.push(orderChannelDef.sort ?? 'ascending'); + return s; + }, { + field: [], + order: [] + }); + } + function mergeTitleFieldDefs(f1, f2) { + const merged = [...f1]; + f2.forEach(fdToMerge => { + for (const fieldDef1 of merged) { + // If already exists, no need to append to merged array + if (deepEqual(fieldDef1, fdToMerge)) { + return; + } + } + + merged.push(fdToMerge); + }); + return merged; + } + function mergeTitle(title1, title2) { + if (deepEqual(title1, title2) || !title2) { + // if titles are the same or title2 is falsy + return title1; + } else if (!title1) { + // if title1 is falsy + return title2; + } else { + return [...vegaUtil.array(title1), ...vegaUtil.array(title2)].join(', '); + } + } + function mergeTitleComponent(v1, v2) { + const v1Val = v1.value; + const v2Val = v2.value; + + if (v1Val == null || v2Val === null) { + return { + explicit: v1.explicit, + value: null + }; + } else if ((isText(v1Val) || isSignalRef(v1Val)) && (isText(v2Val) || isSignalRef(v2Val))) { + return { + explicit: v1.explicit, + value: mergeTitle(v1Val, v2Val) + }; + } else if (isText(v1Val) || isSignalRef(v1Val)) { + return { + explicit: v1.explicit, + value: v1Val + }; + } else if (isText(v2Val) || isSignalRef(v2Val)) { + return { + explicit: v1.explicit, + value: v2Val + }; + } else if (!isText(v1Val) && !isSignalRef(v1Val) && !isText(v2Val) && !isSignalRef(v2Val)) { + return { + explicit: v1.explicit, + value: mergeTitleFieldDefs(v1Val, v2Val) + }; + } + /* istanbul ignore next: Condition should not happen -- only for warning in development. 
*/ + + + throw new Error('It should never reach here'); + } + function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, @@ -2081,262 +2109,281 @@ return obj; } function _classPrivateFieldGet(receiver, privateMap) { - var descriptor = privateMap.get(receiver); + var descriptor = _classExtractFieldDescriptor(receiver, privateMap, "get"); - if (!descriptor) { - throw new TypeError("attempted to get private field on non-instance"); + return _classApplyDescriptorGet(receiver, descriptor); + } + + function _classPrivateFieldSet(receiver, privateMap, value) { + var descriptor = _classExtractFieldDescriptor(receiver, privateMap, "set"); + + _classApplyDescriptorSet(receiver, descriptor, value); + + return value; + } + + function _classExtractFieldDescriptor(receiver, privateMap, action) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to " + action + " private field on non-instance"); } + return privateMap.get(receiver); + } + + function _classApplyDescriptorGet(receiver, descriptor) { if (descriptor.get) { return descriptor.get.call(receiver); } return descriptor.value; } - function _classPrivateFieldSet(receiver, privateMap, value) { - var descriptor = privateMap.get(receiver); - - if (!descriptor) { - throw new TypeError("attempted to set private field on non-instance"); - } - + function _classApplyDescriptorSet(receiver, descriptor, value) { if (descriptor.set) { descriptor.set.call(receiver, value); } else { if (!descriptor.writable) { throw new TypeError("attempted to set read only private field"); } descriptor.value = value; } - - return value; } /** * Collection of all Vega-Lite Error Messages */ function invalidSpec(spec) { - return "Invalid specification ".concat(JSON.stringify(spec), ". Make sure the specification includes at least one of the following properties: \"mark\", \"layer\", \"facet\", \"hconcat\", \"vconcat\", \"concat\", or \"repeat\"."); + return `Invalid specification ${stringify(spec)}. Make sure the specification includes at least one of the following properties: "mark", "layer", "facet", "hconcat", "vconcat", "concat", or "repeat".`; } // FIT const FIT_NON_SINGLE = 'Autosize "fit" only works for single views and layered views.'; function containerSizeNonSingle(name) { const uName = name == 'width' ? 'Width' : 'Height'; - return "".concat(uName, " \"container\" only works for single views and layered views."); + return `${uName} "container" only works for single views and layered views.`; } function containerSizeNotCompatibleWithAutosize(name) { const uName = name == 'width' ? 'Width' : 'Height'; const fitDirection = name == 'width' ? 'x' : 'y'; - return "".concat(uName, " \"container\" only works well with autosize \"fit\" or \"fit-").concat(fitDirection, "\"."); + return `${uName} "container" only works well with autosize "fit" or "fit-${fitDirection}".`; } function droppingFit(channel) { - return channel ? "Dropping \"fit-".concat(channel, "\" because spec has discrete ").concat(getSizeChannel(channel), ".") : "Dropping \"fit\" because spec has discrete size."; + return channel ? `Dropping "fit-${channel}" because spec has discrete ${getSizeChannel$1(channel)}.` : `Dropping "fit" because spec has discrete size.`; } // VIEW SIZE function unknownField(channel) { - return "Unknown field for ".concat(channel, ". Cannot calculate view size."); + return `Unknown field for ${channel}. 
Cannot calculate view size.`; } // SELECTION function cannotProjectOnChannelWithoutField(channel) { - return "Cannot project a selection on encoding channel \"".concat(channel, "\", which has no field."); + return `Cannot project a selection on encoding channel "${channel}", which has no field.`; } function cannotProjectAggregate(channel, aggregate) { - return "Cannot project a selection on encoding channel \"".concat(channel, "\" as it uses an aggregate function (\"").concat(aggregate, "\")."); + return `Cannot project a selection on encoding channel "${channel}" as it uses an aggregate function ("${aggregate}").`; } function nearestNotSupportForContinuous(mark) { - return "The \"nearest\" transform is not supported for ".concat(mark, " marks."); + return `The "nearest" transform is not supported for ${mark} marks.`; } function selectionNotSupported(mark) { - return "Selection not supported for ".concat(mark, " yet."); + return `Selection not supported for ${mark} yet.`; } function selectionNotFound(name) { - return "Cannot find a selection named \"".concat(name, "\"."); + return `Cannot find a selection named "${name}".`; } const SCALE_BINDINGS_CONTINUOUS = 'Scale bindings are currently only supported for scales with unbinned, continuous domains.'; const LEGEND_BINDINGS_MUST_HAVE_PROJECTION = 'Legend bindings are only supported for selections over an individual field or encoding channel.'; + function cannotLookupVariableParameter(name) { + return `Lookups can only be performed on selection parameters. "${name}" is a variable parameter.`; + } function noSameUnitLookup(name) { - return "Cannot define and lookup the \"".concat(name, "\" selection in the same view. ") + "Try moving the lookup into a second, layered view?"; + return `Cannot define and lookup the "${name}" selection in the same view. 
` + `Try moving the lookup into a second, layered view?`; } const NEEDS_SAME_SELECTION = 'The same selection must be used to override scale domains in a layered view.'; const INTERVAL_INITIALIZED_WITH_X_Y = 'Interval selections should be initialized using "x" and/or "y" keys.'; // REPEAT function noSuchRepeatedValue(field) { - return "Unknown repeated value \"".concat(field, "\"."); + return `Unknown repeated value "${field}".`; } function columnsNotSupportByRowCol(type) { - return "The \"columns\" property cannot be used when \"".concat(type, "\" has nested row/column."); + return `The "columns" property cannot be used when "${type}" has nested row/column.`; } // CONCAT / REPEAT const CONCAT_CANNOT_SHARE_AXIS = 'Axes cannot be shared in concatenated or repeated views yet (https://github.com/vega/vega-lite/issues/2415).'; // DATA function unrecognizedParse(p) { - return "Unrecognized parse \"".concat(p, "\"."); + return `Unrecognized parse "${p}".`; } function differentParse(field, local, ancestor) { - return "An ancestor parsed field \"".concat(field, "\" as ").concat(ancestor, " but a child wants to parse the field as ").concat(local, "."); + return `An ancestor parsed field "${field}" as ${ancestor} but a child wants to parse the field as ${local}.`; } const ADD_SAME_CHILD_TWICE = 'Attempt to add the same child twice.'; // TRANSFORMS function invalidTransformIgnored(transform) { - return "Ignoring an invalid transform: ".concat(stringify(transform), "."); + return `Ignoring an invalid transform: ${stringify(transform)}.`; } const NO_FIELDS_NEEDS_AS = 'If "from.fields" is not specified, "as" has to be a string that specifies the key to be used for the data from the secondary source.'; // ENCODING & FACET function customFormatTypeNotAllowed(channel) { - return "Config.customFormatTypes is not true, thus custom format type and format for channel ".concat(channel, " are dropped."); + return `Config.customFormatTypes is not true, thus custom format type and format for channel ${channel} are dropped.`; } function projectionOverridden(opt) { const { parentProjection, projection } = opt; - return "Layer's shared projection ".concat(stringify(parentProjection), " is overridden by a child projection ").concat(stringify(projection), "."); + return `Layer's shared projection ${stringify(parentProjection)} is overridden by a child projection ${stringify(projection)}.`; } const REPLACE_ANGLE_WITH_THETA = 'Arc marks uses theta channel rather than angle, replacing angle with theta.'; function primitiveChannelDef(channel, type, value) { - return "Channel ".concat(channel, " is a ").concat(type, ". Converted to {value: ").concat(stringify(value), "}."); + return `Channel ${channel} is a ${type}. Converted to {value: ${stringify(value)}}.`; } function invalidFieldType(type) { - return "Invalid field type \"".concat(type, "\"."); + return `Invalid field type "${type}".`; } function invalidFieldTypeForCountAggregate(type, aggregate) { - return "Invalid field type \"".concat(type, "\" for aggregate: \"").concat(aggregate, "\", using \"quantitative\" instead."); + return `Invalid field type "${type}" for aggregate: "${aggregate}", using "quantitative" instead.`; } function invalidAggregate(aggregate) { - return "Invalid aggregation operator \"".concat(aggregate, "\"."); + return `Invalid aggregation operator "${aggregate}".`; } function droppingColor(type, opt) { const { fill, stroke } = opt; - return "Dropping color ".concat(type, " as the plot also has ").concat(fill && stroke ? 'fill and stroke' : fill ? 
'fill' : 'stroke', "."); + return `Dropping color ${type} as the plot also has ${fill && stroke ? 'fill and stroke' : fill ? 'fill' : 'stroke'}.`; } + function relativeBandSizeNotSupported(sizeChannel) { + return `Position range does not support relative band size for ${sizeChannel}.`; + } function emptyFieldDef(fieldDef, channel) { - return "Dropping ".concat(stringify(fieldDef), " from channel \"").concat(channel, "\" since it does not contain any data field, datum, value, or signal."); + return `Dropping ${stringify(fieldDef)} from channel "${channel}" since it does not contain any data field, datum, value, or signal.`; } const LINE_WITH_VARYING_SIZE = 'Line marks cannot encode size with a non-groupby field. You may want to use trail marks instead.'; function incompatibleChannel(channel, markOrFacet, when) { - return "".concat(channel, " dropped as it is incompatible with \"").concat(markOrFacet, "\"").concat(when ? " when ".concat(when) : '', "."); + return `${channel} dropped as it is incompatible with "${markOrFacet}"${when ? ` when ${when}` : ''}.`; } function invalidEncodingChannel(channel) { - return "".concat(channel, "-encoding is dropped as ").concat(channel, " is not a valid encoding channel."); + return `${channel}-encoding is dropped as ${channel} is not a valid encoding channel.`; } - function facetChannelShouldBeDiscrete(channel) { - return "".concat(channel, " encoding should be discrete (ordinal / nominal / binned)."); + function channelShouldBeDiscrete(channel) { + return `${channel} encoding should be discrete (ordinal / nominal / binned).`; } + function channelShouldBeDiscreteOrDiscretizing(channel) { + return `${channel} encoding should be discrete (ordinal / nominal / binned) or use a discretizing scale (e.g. threshold).`; + } function facetChannelDropped(channels) { - return "Facet encoding dropped as ".concat(channels.join(' and '), " ").concat(channels.length > 1 ? 'are' : 'is', " also specified."); + return `Facet encoding dropped as ${channels.join(' and ')} ${channels.length > 1 ? 'are' : 'is'} also specified.`; } function discreteChannelCannotEncode(channel, type) { - return "Using discrete channel \"".concat(channel, "\" to encode \"").concat(type, "\" field can be misleading as it does not encode ").concat(type === 'ordinal' ? 'order' : 'magnitude', "."); + return `Using discrete channel "${channel}" to encode "${type}" field can be misleading as it does not encode ${type === 'ordinal' ? 'order' : 'magnitude'}.`; } // MARK function rangeMarkAlignmentCannotBeExpression(align) { - return "The ".concat(align, " for range marks cannot be an expression"); + return `The ${align} for range marks cannot be an expression`; } function lineWithRange(hasX2, hasY2) { const channels = hasX2 && hasY2 ? 'x2 and y2' : hasX2 ? 'x2' : 'y2'; - return "Line mark is for continuous lines and thus cannot be used with ".concat(channels, ". We will use the rule mark (line segments) instead."); + return `Line mark is for continuous lines and thus cannot be used with ${channels}. We will use the rule mark (line segments) instead.`; } function orientOverridden(original, actual) { - return "Specified orient \"".concat(original, "\" overridden with \"").concat(actual, "\"."); + return `Specified orient "${original}" overridden with "${actual}".`; } // SCALE - const RANGE_STEP_DEPRECATED = "Scale's \"rangeStep\" is deprecated and will be removed in Vega-Lite 5.0. Please use \"width\"/\"height\": {\"step\": ...} instead. 
See https://vega.github.io/vega-lite/docs/size.html."; function cannotUseScalePropertyWithNonColor(prop) { - return "Cannot use the scale property \"".concat(prop, "\" with non-color channel."); + return `Cannot use the scale property "${prop}" with non-color channel.`; } + function cannotUseRelativeBandSizeWithNonBandScale(scaleType) { + return `Cannot use the relative band size with ${scaleType} scale.`; + } function unaggregateDomainHasNoEffectForRawField(fieldDef) { - return "Using unaggregated domain with raw field has no effect (".concat(stringify(fieldDef), ")."); + return `Using unaggregated domain with raw field has no effect (${stringify(fieldDef)}).`; } function unaggregateDomainWithNonSharedDomainOp(aggregate) { - return "Unaggregated domain not applicable for \"".concat(aggregate, "\" since it produces values outside the origin domain of the source data."); + return `Unaggregated domain not applicable for "${aggregate}" since it produces values outside the origin domain of the source data.`; } function unaggregatedDomainWithLogScale(fieldDef) { - return "Unaggregated domain is currently unsupported for log scale (".concat(stringify(fieldDef), ")."); + return `Unaggregated domain is currently unsupported for log scale (${stringify(fieldDef)}).`; } function cannotApplySizeToNonOrientedMark(mark) { - return "Cannot apply size to non-oriented mark \"".concat(mark, "\"."); + return `Cannot apply size to non-oriented mark "${mark}".`; } function scaleTypeNotWorkWithChannel(channel, scaleType, defaultScaleType) { - return "Channel \"".concat(channel, "\" does not work with \"").concat(scaleType, "\" scale. We are using \"").concat(defaultScaleType, "\" scale instead."); + return `Channel "${channel}" does not work with "${scaleType}" scale. We are using "${defaultScaleType}" scale instead.`; } function scaleTypeNotWorkWithFieldDef(scaleType, defaultScaleType) { - return "FieldDef does not work with \"".concat(scaleType, "\" scale. We are using \"").concat(defaultScaleType, "\" scale instead."); + return `FieldDef does not work with "${scaleType}" scale. We are using "${defaultScaleType}" scale instead.`; } function scalePropertyNotWorkWithScaleType(scaleType, propName, channel) { - return "".concat(channel, "-scale's \"").concat(propName, "\" is dropped as it does not work with ").concat(scaleType, " scale."); + return `${channel}-scale's "${propName}" is dropped as it does not work with ${scaleType} scale.`; } function stepDropped(channel) { - return "The step for \"".concat(channel, "\" is dropped because the ").concat(channel === 'width' ? 'x' : 'y', " is continuous."); + return `The step for "${channel}" is dropped because the ${channel === 'width' ? 'x' : 'y'} is continuous.`; } function mergeConflictingProperty(property, propertyOf, v1, v2) { - return "Conflicting ".concat(propertyOf.toString(), " property \"").concat(property.toString(), "\" (").concat(stringify(v1), " and ").concat(stringify(v2), "). Using ").concat(stringify(v1), "."); + return `Conflicting ${propertyOf.toString()} property "${property.toString()}" (${stringify(v1)} and ${stringify(v2)}). Using ${stringify(v1)}.`; } function mergeConflictingDomainProperty(property, propertyOf, v1, v2) { - return "Conflicting ".concat(propertyOf.toString(), " property \"").concat(property.toString(), "\" (").concat(stringify(v1), " and ").concat(stringify(v2), "). Using the union of the two domains."); + return `Conflicting ${propertyOf.toString()} property "${property.toString()}" (${stringify(v1)} and ${stringify(v2)}). 
Using the union of the two domains.`; } function independentScaleMeansIndependentGuide(channel) { - return "Setting the scale to be independent for \"".concat(channel, "\" means we also have to set the guide (axis or legend) to be independent."); + return `Setting the scale to be independent for "${channel}" means we also have to set the guide (axis or legend) to be independent.`; } function domainSortDropped(sort) { - return "Dropping sort property ".concat(stringify(sort), " as unioned domains only support boolean or op \"count\", \"min\", and \"max\"."); + return `Dropping sort property ${stringify(sort)} as unioned domains only support boolean or op "count", "min", and "max".`; } const MORE_THAN_ONE_SORT = 'Domains that should be unioned has conflicting sort properties. Sort will be set to true.'; const FACETED_INDEPENDENT_DIFFERENT_SOURCES = 'Detected faceted independent scales that union domain of multiple fields from different data sources. We will use the first field. The result view size may be incorrect.'; const FACETED_INDEPENDENT_SAME_FIELDS_DIFFERENT_SOURCES = 'Detected faceted independent scales that union domain of the same fields from different source. We will assume that this is the same field from a different fork of the same data source. However, if this is not the case, the result view size may be incorrect.'; const FACETED_INDEPENDENT_SAME_SOURCE = 'Detected faceted independent scales that union domain of multiple fields from the same data source. We will use the first field. The result view size may be incorrect.'; // AXIS function cannotStackRangedMark(channel) { - return "Cannot stack \"".concat(channel, "\" if there is already \"").concat(channel, "2\"."); + return `Cannot stack "${channel}" if there is already "${channel}2".`; } function cannotStackNonLinearScale(scaleType) { - return "Cannot stack non-linear scale (".concat(scaleType, ")."); + return `Cannot stack non-linear scale (${scaleType}).`; } function stackNonSummativeAggregate(aggregate) { - return "Stacking is applied even though the aggregate function is non-summative (\"".concat(aggregate, "\")."); + return `Stacking is applied even though the aggregate function is non-summative ("${aggregate}").`; } // TIMEUNIT function invalidTimeUnit(unitName, value) { - return "Invalid ".concat(unitName, ": ").concat(stringify(value), "."); + return `Invalid ${unitName}: ${stringify(value)}.`; } function droppedDay(d) { - return "Dropping day from datetime ".concat(stringify(d), " as day cannot be combined with other units."); + return `Dropping day from datetime ${stringify(d)} as day cannot be combined with other units.`; } function errorBarCenterAndExtentAreNotNeeded(center, extent) { - return "".concat(extent ? 'extent ' : '').concat(extent && center ? 'and ' : '').concat(center ? 'center ' : '').concat(extent && center ? 'are ' : 'is ', "not needed when data are aggregated."); + return `${extent ? 'extent ' : ''}${extent && center ? 'and ' : ''}${center ? 'center ' : ''}${extent && center ? 
'are ' : 'is '}not needed when data are aggregated.`; } function errorBarCenterIsUsedWithWrongExtent(center, extent, mark) { - return "".concat(center, " is not usually used with ").concat(extent, " for ").concat(mark, "."); + return `${center} is not usually used with ${extent} for ${mark}.`; } function errorBarContinuousAxisHasCustomizedAggregate(aggregate, compositeMark) { - return "Continuous axis should not have customized aggregation function ".concat(aggregate, "; ").concat(compositeMark, " already agregates the axis."); + return `Continuous axis should not have customized aggregation function ${aggregate}; ${compositeMark} already agregates the axis.`; } function errorBand1DNotSupport(property) { - return "1D error band does not support ".concat(property, "."); + return `1D error band does not support ${property}.`; } // CHANNEL function channelRequiredForBinned(channel) { - return "Channel ".concat(channel, " is required for \"binned\" bin."); + return `Channel ${channel} is required for "binned" bin.`; } function channelShouldNotBeUsedForBinned(channel) { - return "Channel ".concat(channel, " should not be used with \"binned\" bin."); + return `Channel ${channel} should not be used with "binned" bin.`; } function domainRequiredForThresholdScale(channel) { - return "Domain for ".concat(channel, " is required for threshold scale."); + return `Domain for ${channel} is required for threshold scale.`; } /** * Main (default) Vega Logger instance for Vega-Lite. */ - const main = logger(Warn); + const main = vegaUtil.logger(vegaUtil.Warn); let current = main; /** * Set the singleton logger to be a custom logger. */ @@ -2365,11 +2412,11 @@ * @maximum 12 * @TJS-type integer */ function isDateTime(o) { - if (o && isObject(o)) { + if (o && vegaUtil.isObject(o)) { for (const part of TIMEUNIT_PARTS) { if (part in o) { return true; } } @@ -2385,11 +2432,11 @@ function normalizeQuarter(q) { if (isNumeric(q)) { q = +q; } - if (isNumber(q)) { + if (vegaUtil.isNumber(q)) { if (q > 4) { warn(invalidTimeUnit('quarter', q)); } // We accept 1-based quarter, so need to readjust to 0-based quarter @@ -2403,11 +2450,11 @@ function normalizeMonth(m) { if (isNumeric(m)) { m = +m; } - if (isNumber(m)) { + if (vegaUtil.isNumber(m)) { // We accept 1-based month, so need to readjust to 0-based month return m - 1; } else { const lowerM = m.toLowerCase(); const monthIndex = MONTHS.indexOf(lowerM); @@ -2431,11 +2478,11 @@ function normalizeDay(d) { if (isNumeric(d)) { d = +d; } - if (isNumber(d)) { + if (vegaUtil.isNumber(d)) { // mod so that this can be both 0-based where 0 = sunday // and 1-based where 7=sunday return d % 7; } else { const lowerD = d.toLowerCase(); @@ -2485,22 +2532,22 @@ if (d.month !== undefined) { const month = normalize ? normalizeMonth(d.month) : d.month; parts.push(month); } else if (d.quarter !== undefined) { const quarter = normalize ? normalizeQuarter(d.quarter) : d.quarter; - parts.push(isNumber(quarter) ? quarter * 3 : quarter + '*3'); + parts.push(vegaUtil.isNumber(quarter) ? quarter * 3 : `${quarter}*3`); } else { parts.push(0); // months start at zero in JS } if (d.date !== undefined) { parts.push(d.date); } else if (d.day !== undefined) { // HACK: Day only works as a standalone unit // This is only correct because we always set year to 2006 for day const day = normalize ? normalizeDay(d.day) : d.day; - parts.push(isNumber(day) ? day + 1 : day + '+1'); + parts.push(vegaUtil.isNumber(day) ? 
day + 1 : `${day}+1`); } else { parts.push(1); // Date starts at 1 in JS } // Note: can't use TimeUnit enum here as importing it will create // circular dependency problem! @@ -2523,13 +2570,13 @@ function dateTimeToExpr(d) { const parts = dateTimeParts(d, true); const string = parts.join(', '); if (d.utc) { - return "utc(".concat(string, ")"); + return `utc(${string})`; } else { - return "datetime(".concat(string, ")"); + return `datetime(${string})`; } } /** * Return Vega expression for a date time expression. * @@ -2540,13 +2587,13 @@ function dateTimeExprToExpr(d) { const parts = dateTimeParts(d, false); const string = parts.join(', '); if (d.utc) { - return "utc(".concat(string, ")"); + return `utc(${string})`; } else { - return "datetime(".concat(string, ")"); + return `datetime(${string})`; } } /** * @param d the date time. * @returns the timestamp. @@ -2591,19 +2638,11 @@ const VEGALITE_TIMEFORMAT = { 'year-month': '%b %Y ', 'year-month-date': '%b %d, %Y ' }; function getTimeUnitParts(timeUnit) { - const parts = []; - - for (const part of TIMEUNIT_PARTS) { - if (containsTimeUnit(timeUnit, part)) { - parts.push(part); - } - } - - return parts; + return TIMEUNIT_PARTS.filter(part => containsTimeUnit(timeUnit, part)); } /** Returns true if fullTimeUnit contains the timeUnit, false otherwise. */ function containsTimeUnit(fullTimeUnit, timeUnit) { const index = fullTimeUnit.indexOf(timeUnit); @@ -2641,13 +2680,13 @@ const utc = isUTCTimeUnit(fullTimeUnit) ? 'utc' : ''; function func(timeUnit) { if (timeUnit === 'quarter') { // quarter starting at 0 (0,3,6,9). - return "(".concat(utc, "quarter(").concat(fieldRef, ")-1)"); + return `(${utc}quarter(${fieldRef})-1)`; } else { - return "".concat(utc).concat(timeUnit, "(").concat(fieldRef, ")"); + return `${utc}${timeUnit}(${fieldRef})`; } } let lastTimeUnit; const dateExpr = {}; @@ -2669,11 +2708,11 @@ if (!timeUnit) { return undefined; } const timeUnitParts = getTimeUnitParts(timeUnit); - return "timeUnitSpecifier(".concat(fastJsonStableStringify(timeUnitParts), ", ").concat(fastJsonStableStringify(VEGALITE_TIMEFORMAT), ")"); + return `timeUnitSpecifier(${stringify(timeUnitParts)}, ${stringify(VEGALITE_TIMEFORMAT)})`; } /** * Returns the signal expression used for axis labels for a time unit. */ @@ -2685,24 +2724,24 @@ const expr = timeUnitSpecifierExpression(timeUnit); // We only use utcFormat for utc scale // For utc time units, the data is already converted as a part of timeUnit transform. // Thus, utc time units should use timeFormat to avoid shifting the time twice. const utc = isUTCScale || isUTCTimeUnit(timeUnit); - return "".concat(utc ? 'utc' : 'time', "Format(").concat(field, ", ").concat(expr, ")"); + return `${utc ? 'utc' : 'time'}Format(${field}, ${expr})`; } function normalizeTimeUnit(timeUnit) { if (!timeUnit) { return undefined; } let params; - if (isString(timeUnit)) { + if (vegaUtil.isString(timeUnit)) { params = { unit: timeUnit }; - } else if (isObject(timeUnit)) { + } else if (vegaUtil.isObject(timeUnit)) { params = { ...timeUnit, ...(timeUnit.unit ? { unit: timeUnit.unit } : {}) }; @@ -2720,133 +2759,19 @@ utc, ...rest } = normalizeTimeUnit(tu); if (rest.unit) { - return (utc ? 'utc' : '') + keys(rest).map(p => varName("".concat(p === 'unit' ? '' : "_".concat(p, "_")).concat(rest[p]))).join(''); + return (utc ? 'utc' : '') + keys(rest).map(p => varName(`${p === 'unit' ? '' : `_${p}_`}${rest[p]}`)).join(''); } else { // when maxbins is specified instead of units - return (utc ? 
'utc' : '') + 'timeunit' + keys(rest).map(p => varName("_".concat(p, "_").concat(rest[p]))).join(''); + return (utc ? 'utc' : '') + 'timeunit' + keys(rest).map(p => varName(`_${p}_${rest[p]}`)).join(''); } } - function isSignalRef(o) { - return o && !!o['signal']; - } // TODO: add type of value (Make it VgValueRef<V extends ValueOrGradient> {value?:V ...}) - - function isVgRangeStep(range) { - return !!range['step']; - } - function isDataRefUnionedDomain(domain) { - if (!isArray(domain)) { - return 'fields' in domain && !('data' in domain); - } - - return false; - } - function isFieldRefUnionDomain(domain) { - if (!isArray(domain)) { - return 'fields' in domain && 'data' in domain; - } - - return false; - } - function isDataRefDomain(domain) { - if (!isArray(domain)) { - return 'field' in domain && 'data' in domain; - } - - return false; - } - const VG_MARK_CONFIG_INDEX = { - aria: 1, - description: 1, - ariaRole: 1, - ariaRoleDescription: 1, - blend: 1, - opacity: 1, - fill: 1, - fillOpacity: 1, - stroke: 1, - strokeCap: 1, - strokeWidth: 1, - strokeOpacity: 1, - strokeDash: 1, - strokeDashOffset: 1, - strokeJoin: 1, - strokeOffset: 1, - strokeMiterLimit: 1, - startAngle: 1, - endAngle: 1, - padAngle: 1, - innerRadius: 1, - outerRadius: 1, - size: 1, - shape: 1, - interpolate: 1, - tension: 1, - orient: 1, - align: 1, - baseline: 1, - text: 1, - dir: 1, - dx: 1, - dy: 1, - ellipsis: 1, - limit: 1, - radius: 1, - theta: 1, - angle: 1, - font: 1, - fontSize: 1, - fontWeight: 1, - fontStyle: 1, - lineBreak: 1, - lineHeight: 1, - cursor: 1, - href: 1, - tooltip: 1, - cornerRadius: 1, - cornerRadiusTopLeft: 1, - cornerRadiusTopRight: 1, - cornerRadiusBottomLeft: 1, - cornerRadiusBottomRight: 1, - aspect: 1, - width: 1, - height: 1, - url: 1, - smooth: 1 // commented below are vg channel that do not have mark config. - // x: 1, - // y: 1, - // x2: 1, - // y2: 1, - // xc'|'yc' - // clip: 1, - // path: 1, - // url: 1, - - }; - const VG_MARK_CONFIGS = keys(VG_MARK_CONFIG_INDEX); - const VG_MARK_INDEX = { - arc: 1, - area: 1, - group: 1, - image: 1, - line: 1, - path: 1, - rect: 1, - rule: 1, - shape: 1, - symbol: 1, - text: 1, - trail: 1 - }; // Vega's cornerRadius channels. - - const VG_CORNERRADIUS_CHANNELS = ['cornerRadius', 'cornerRadiusTopLeft', 'cornerRadiusTopRight', 'cornerRadiusBottomLeft', 'cornerRadiusBottomRight']; - function isSelectionPredicate(predicate) { - return predicate === null || predicate === void 0 ? void 0 : predicate['selection']; + return predicate === null || predicate === void 0 ? 
void 0 : predicate['param']; } function isFieldEqualPredicate(predicate) { return predicate && !!predicate.field && predicate.equal !== undefined; } function isFieldLTPredicate(predicate) { @@ -2860,22 +2785,22 @@ } function isFieldGTEPredicate(predicate) { return predicate && !!predicate.field && predicate.gte !== undefined; } function isFieldRangePredicate(predicate) { - if (predicate && predicate.field) { - if (isArray(predicate.range) && predicate.range.length === 2) { + if (predicate !== null && predicate !== void 0 && predicate.field) { + if (vegaUtil.isArray(predicate.range) && predicate.range.length === 2) { return true; } else if (isSignalRef(predicate.range)) { return true; } } return false; } function isFieldOneOfPredicate(predicate) { - return predicate && !!predicate.field && (isArray(predicate.oneOf) || isArray(predicate.in)) // backward compatibility + return predicate && !!predicate.field && (vegaUtil.isArray(predicate.oneOf) || vegaUtil.isArray(predicate.in)) // backward compatibility ; } function isFieldValidPredicate(predicate) { return predicate && !!predicate.field && predicate.valid !== undefined; } @@ -2903,72 +2828,72 @@ } = predicate; const timeUnit = (_normalizeTimeUnit = normalizeTimeUnit(predicate.timeUnit)) === null || _normalizeTimeUnit === void 0 ? void 0 : _normalizeTimeUnit.unit; const fieldExpr$1 = timeUnit ? // For timeUnit, cast into integer with time() so we can use ===, inrange, indexOf to compare values directly. // TODO: We calculate timeUnit on the fly here. Consider if we would like to consolidate this with timeUnit pipeline // TODO: support utc - 'time(' + fieldExpr(timeUnit, field) + ')' : vgField(predicate, { + `time(${fieldExpr(timeUnit, field)})` : vgField(predicate, { expr: 'datum' }); if (isFieldEqualPredicate(predicate)) { - return fieldExpr$1 + '===' + predicateValueExpr(predicate.equal, timeUnit); + return `${fieldExpr$1}===${predicateValueExpr(predicate.equal, timeUnit)}`; } else if (isFieldLTPredicate(predicate)) { const upper = predicate.lt; - return "".concat(fieldExpr$1, "<").concat(predicateValueExpr(upper, timeUnit)); + return `${fieldExpr$1}<${predicateValueExpr(upper, timeUnit)}`; } else if (isFieldGTPredicate(predicate)) { const lower = predicate.gt; - return "".concat(fieldExpr$1, ">").concat(predicateValueExpr(lower, timeUnit)); + return `${fieldExpr$1}>${predicateValueExpr(lower, timeUnit)}`; } else if (isFieldLTEPredicate(predicate)) { const upper = predicate.lte; - return "".concat(fieldExpr$1, "<=").concat(predicateValueExpr(upper, timeUnit)); + return `${fieldExpr$1}<=${predicateValueExpr(upper, timeUnit)}`; } else if (isFieldGTEPredicate(predicate)) { const lower = predicate.gte; - return "".concat(fieldExpr$1, ">=").concat(predicateValueExpr(lower, timeUnit)); + return `${fieldExpr$1}>=${predicateValueExpr(lower, timeUnit)}`; } else if (isFieldOneOfPredicate(predicate)) { - return "indexof([".concat(predicateValuesExpr(predicate.oneOf, timeUnit).join(','), "], ").concat(fieldExpr$1, ") !== -1"); + return `indexof([${predicateValuesExpr(predicate.oneOf, timeUnit).join(',')}], ${fieldExpr$1}) !== -1`; } else if (isFieldValidPredicate(predicate)) { return fieldValidPredicate(fieldExpr$1, predicate.valid); } else if (isFieldRangePredicate(predicate)) { const { range } = predicate; const lower = isSignalRef(range) ? { - signal: "".concat(range.signal, "[0]") + signal: `${range.signal}[0]` } : range[0]; const upper = isSignalRef(range) ? 
{ - signal: "".concat(range.signal, "[1]") + signal: `${range.signal}[1]` } : range[1]; if (lower !== null && upper !== null && useInRange) { return 'inrange(' + fieldExpr$1 + ', [' + predicateValueExpr(lower, timeUnit) + ', ' + predicateValueExpr(upper, timeUnit) + '])'; } const exprs = []; if (lower !== null) { - exprs.push("".concat(fieldExpr$1, " >= ").concat(predicateValueExpr(lower, timeUnit))); + exprs.push(`${fieldExpr$1} >= ${predicateValueExpr(lower, timeUnit)}`); } if (upper !== null) { - exprs.push("".concat(fieldExpr$1, " <= ").concat(predicateValueExpr(upper, timeUnit))); + exprs.push(`${fieldExpr$1} <= ${predicateValueExpr(upper, timeUnit)}`); } return exprs.length > 0 ? exprs.join(' && ') : 'true'; } /* istanbul ignore next: it should never reach here */ - throw new Error("Invalid field predicate: ".concat(JSON.stringify(predicate))); + throw new Error(`Invalid field predicate: ${stringify(predicate)}`); } function fieldValidPredicate(fieldExpr, valid = true) { if (valid) { - return "isValid(".concat(fieldExpr, ") && isFinite(+").concat(fieldExpr, ")"); + return `isValid(${fieldExpr}) && isFinite(+${fieldExpr})`; } else { - return "!isValid(".concat(fieldExpr, ") || !isFinite(+").concat(fieldExpr, ")"); + return `!isValid(${fieldExpr}) || !isFinite(+${fieldExpr})`; } } - function normalizePredicate(f) { + function normalizePredicate$1(f) { if (isFieldPredicate(f) && f.timeUnit) { var _normalizeTimeUnit2; return { ...f, timeUnit: (_normalizeTimeUnit2 = normalizeTimeUnit(f.timeUnit)) === null || _normalizeTimeUnit2 === void 0 ? void 0 : _normalizeTimeUnit2.unit @@ -3118,22 +3043,22 @@ function scaleTypePrecedence(scaleType) { return SCALE_PRECEDENCE_INDEX[scaleType]; } const CONTINUOUS_TO_CONTINUOUS_SCALES = ['linear', 'log', 'pow', 'sqrt', 'symlog', 'time', 'utc']; - const CONTINUOUS_TO_CONTINUOUS_INDEX = toSet(CONTINUOUS_TO_CONTINUOUS_SCALES); + const CONTINUOUS_TO_CONTINUOUS_INDEX = vegaUtil.toSet(CONTINUOUS_TO_CONTINUOUS_SCALES); const QUANTITATIVE_SCALES = ['linear', 'log', 'pow', 'sqrt', 'symlog']; - const QUANTITATIVE_SCALES_INDEX = toSet(QUANTITATIVE_SCALES); + const QUANTITATIVE_SCALES_INDEX = vegaUtil.toSet(QUANTITATIVE_SCALES); function isQuantitative(type) { return type in QUANTITATIVE_SCALES_INDEX; } const CONTINUOUS_TO_DISCRETE_SCALES = ['quantile', 'quantize', 'threshold']; - const CONTINUOUS_TO_DISCRETE_INDEX = toSet(CONTINUOUS_TO_DISCRETE_SCALES); + const CONTINUOUS_TO_DISCRETE_INDEX = vegaUtil.toSet(CONTINUOUS_TO_DISCRETE_SCALES); const CONTINUOUS_DOMAIN_SCALES = CONTINUOUS_TO_CONTINUOUS_SCALES.concat(['quantile', 'quantize', 'threshold', 'sequential', 'identity']); - const CONTINUOUS_DOMAIN_INDEX = toSet(CONTINUOUS_DOMAIN_SCALES); + const CONTINUOUS_DOMAIN_INDEX = vegaUtil.toSet(CONTINUOUS_DOMAIN_SCALES); const DISCRETE_DOMAIN_SCALES = ['ordinal', 'bin-ordinal', 'point', 'band']; - const DISCRETE_DOMAIN_INDEX = toSet(DISCRETE_DOMAIN_SCALES); + const DISCRETE_DOMAIN_INDEX = vegaUtil.toSet(DISCRETE_DOMAIN_SCALES); function hasDiscreteDomain(type) { return type in DISCRETE_DOMAIN_INDEX; } function hasContinuousDomain(type) { return type in CONTINUOUS_DOMAIN_INDEX; @@ -3160,18 +3085,21 @@ maxStrokeWidth: 4, quantileCount: 4, quantizeCount: 4 }; function isExtendedScheme(scheme) { - return !isString(scheme) && !!scheme['name']; + return !vegaUtil.isString(scheme) && !!scheme['name']; } - function isSelectionDomain(domain) { - return domain === null || domain === void 0 ? 
void 0 : domain['selection']; + function isParameterDomain(domain) { + return domain === null || domain === void 0 ? void 0 : domain['param']; } function isDomainUnionWith(domain) { return domain && domain['unionWith']; } + function isFieldRange(range) { + return vega.isObject(range) && 'field' in range; + } const SCALE_PROPERTY_INDEX = { type: 1, domain: 1, domainMax: 1, domainMin: 1, @@ -3200,11 +3128,11 @@ paddingInner: 1, paddingOuter: 1 }; const { type, - domain, + domain: domain$1, range, rangeMax, rangeMin, scheme, ...NON_TYPE_DOMAIN_RANGE_VEGA_SCALE_PROPERTY_INDEX @@ -3218,26 +3146,26 @@ case 'range': return true; case 'scheme': case 'interpolate': - return !contains(['point', 'band', 'identity'], scaleType); + return !['point', 'band', 'identity'].includes(scaleType); case 'bins': - return !contains(['point', 'band', 'identity', 'ordinal'], scaleType); + return !['point', 'band', 'identity', 'ordinal'].includes(scaleType); case 'round': return isContinuousToContinuous(scaleType) || scaleType === 'band' || scaleType === 'point'; case 'padding': case 'rangeMin': case 'rangeMax': - return isContinuousToContinuous(scaleType) || contains(['point', 'band'], scaleType); + return isContinuousToContinuous(scaleType) || ['point', 'band'].includes(scaleType); case 'paddingOuter': case 'align': - return contains(['point', 'band'], scaleType); + return ['point', 'band'].includes(scaleType); case 'paddingInner': return scaleType === 'band'; case 'domainMax': @@ -3344,32 +3272,15 @@ case STROKE: return scaleType !== 'band'; // band does not make sense with color case STROKEDASH: - return scaleType === 'ordinal' || isContinuousToDiscrete(scaleType); - case SHAPE: - return scaleType === 'ordinal'; - // shape = lookup only + return scaleType === 'ordinal' || isContinuousToDiscrete(scaleType); } } - function isExprRef(o) { - return o && !!o['expr']; - } - function replaceExprRefInIndex(index) { - const props = keys(index || {}); - const newIndex = {}; - - for (const prop of props) { - newIndex[prop] = signalRefOrValue(index[prop]); - } - - return newIndex; - } - /** * All types of primitive marks. 
*/ const Mark = { arc: 'arc', @@ -3393,40 +3304,40 @@ const IMAGE = Mark.image; const LINE = Mark.line; const POINT = Mark.point; const RECT = Mark.rect; const RULE = Mark.rule; - const TEXT$1 = Mark.text; + const TEXT = Mark.text; const TICK = Mark.tick; const TRAIL = Mark.trail; const CIRCLE = Mark.circle; const SQUARE = Mark.square; const GEOSHAPE = Mark.geoshape; function isPathMark(m) { - return contains(['line', 'area', 'trail'], m); + return ['line', 'area', 'trail'].includes(m); } function isRectBasedMark(m) { - return contains(['rect', 'bar', 'image', 'arc' + return ['rect', 'bar', 'image', 'arc' /* arc is rect/interval in polar coordinate */ - ], m); + ].includes(m); } const PRIMITIVE_MARKS = keys(Mark); function isMarkDef(mark) { return mark['type']; } - const PRIMITIVE_MARK_INDEX = toSet(PRIMITIVE_MARKS); + vegaUtil.toSet(PRIMITIVE_MARKS); const STROKE_CONFIG = ['stroke', 'strokeWidth', 'strokeDash', 'strokeDashOffset', 'strokeOpacity', 'strokeJoin', 'strokeMiterLimit']; const FILL_CONFIG = ['fill', 'fillOpacity']; const FILL_STROKE_CONFIG = [...STROKE_CONFIG, ...FILL_CONFIG]; const VL_ONLY_MARK_CONFIG_INDEX = { color: 1, filled: 1, invalid: 1, order: 1, radius2: 1, theta2: 1, - timeUnitBand: 1, + timeUnitBandSize: 1, timeUnitBandPosition: 1 }; const VL_ONLY_MARK_CONFIG_PROPERTIES = keys(VL_ONLY_MARK_CONFIG_INDEX); const VL_ONLY_MARK_SPECIFIC_CONFIG_PROPERTY_INDEX = { area: ['line', 'point'], @@ -3436,11 +3347,11 @@ tick: ['bandSize', 'thickness'] }; const defaultMarkConfig = { color: '#4c78a8', invalid: 'filter', - timeUnitBand: 1 + timeUnitBandSize: 1 }; // TODO: replace with MarkConfigMixins[Mark] once https://github.com/vega/ts-json-schema-generator/issues/344 is fixed const MARK_CONFIG_INDEX = { mark: 1, arc: 1, @@ -3457,10 +3368,13 @@ tick: 1, trail: 1, geoshape: 1 }; const MARK_CONFIGS = keys(MARK_CONFIG_INDEX); + function isRelativeBandSize(o) { + return o && o['band'] != undefined; + } const BAR_CORNER_RADIUS_INDEX = { horizontal: ['cornerRadiusTopRight', 'cornerRadiusBottomRight'], vertical: ['cornerRadiusTopLeft', 'cornerRadiusTopRight'] }; const DEFAULT_RECT_BAND_SIZE = 5; @@ -3545,11 +3459,11 @@ test, ...zeroValueRef }; } function fieldInvalidPredicate(field, invalid = true) { - return fieldValidPredicate(isString(field) ? field : vgField(field, { + return fieldValidPredicate(vegaUtil.isString(field) ? field : vgField(field, { expr: 'datum' }), !invalid); } function datumDefToExpr(datumDef) { const { @@ -3558,11 +3472,11 @@ if (isDateTime(datum)) { return dateTimeToExpr(datum); } - return "".concat(JSON.stringify(datum)); + return `${stringify(datum)}`; } function valueRefForFieldOrDatumDef(fieldDef, scaleName, opt, encode) { const ref = {}; if (scaleName) { @@ -3612,13 +3526,13 @@ scaleName, fieldOrDatumDef, fieldOrDatumDef2, offset, startSuffix, - band = 0.5 + bandPosition = 0.5 }) { - const expr = 0 < band && band < 1 ? 'datum' : undefined; + const expr = 0 < bandPosition && bandPosition < 1 ? 'datum' : undefined; const start = vgField(fieldOrDatumDef, { expr, suffix: startSuffix }); const end = fieldOrDatumDef2 !== undefined ? vgField(fieldOrDatumDef2, { @@ -3627,17 +3541,17 @@ suffix: 'end', expr }); const ref = {}; - if (band === 0 || band === 1) { + if (bandPosition === 0 || bandPosition === 1) { ref.scale = scaleName; - const val = band === 0 ? start : end; + const val = bandPosition === 0 ? 
start : end; ref.field = val; } else { - const datum = "".concat(band, " * ").concat(start, " + ").concat(1 - band, " * ").concat(end); - ref.signal = "scale(\"".concat(scaleName, "\", ").concat(datum, ")"); + const datum = isSignalRef(bandPosition) ? `${bandPosition.signal} * ${start} + (1-${bandPosition.signal}) * ${end}` : `${bandPosition} * ${start} + ${1 - bandPosition} * ${end}`; + ref.signal = `scale("${scaleName}", ${datum})`; } if (offset) { ref.offset = offset; } @@ -3657,55 +3571,48 @@ scaleName, scale, stack, offset, defaultRef, - band + bandPosition }) { // TODO: datum support if (channelDef) { /* istanbul ignore else */ if (isFieldOrDatumDef(channelDef)) { - var _ref, _band2; - if (isTypedFieldDef(channelDef)) { - var _band; - - band = (_band = band) !== null && _band !== void 0 ? _band : getBand({ - channel, + bandPosition ?? (bandPosition = getBandPosition({ fieldDef: channelDef, fieldDef2: channel2Def, markDef, - stack, - config, - isMidPoint: true - }); + config + })); const { bin, timeUnit, type } = channelDef; - if (isBinning(bin) || band && timeUnit && type === TEMPORAL) { + if (isBinning(bin) || bandPosition && timeUnit && type === TEMPORAL) { // Use middle only for x an y to place marks in the center between start and end of the bin range. // We do not use the mid point for other channels (e.g. size) so that properties of legends and marks match. - if (stack && stack.impute) { + if (stack !== null && stack !== void 0 && stack.impute) { // For stack, we computed bin_mid so we can impute. return valueRefForFieldOrDatumDef(channelDef, scaleName, { binSuffix: 'mid' }, { offset }); } - if (band) { + if (bandPosition) { // if band = 0, no need to call interpolation // For non-stack, we can just calculate bin mid on the fly using signal. return interpolatedSignalRef({ scaleName, fieldOrDatumDef: channelDef, - band, + bandPosition, offset }); } return valueRefForFieldOrDatumDef(channelDef, scaleName, binRequiresRange(channelDef, channel) ? { @@ -3717,11 +3624,11 @@ if (isFieldDef(channel2Def)) { return interpolatedSignalRef({ scaleName, fieldOrDatumDef: channelDef, fieldOrDatumDef2: channel2Def, - band, + bandPosition, offset }); } else { const channel2 = channel === X ? X2 : Y2; warn(channelRequiredForBinned(channel2)); @@ -3734,11 +3641,11 @@ binSuffix: 'range' } : {}, // no need for bin suffix if there is no scale { offset, // For band, to get mid point, need to offset by half of the band - band: scaleType === 'band' ? (_ref = (_band2 = band) !== null && _band2 !== void 0 ? _band2 : channelDef.band) !== null && _ref !== void 0 ? _ref : 0.5 : undefined + band: scaleType === 'band' ? bandPosition ?? channelDef.bandPosition ?? 0.5 : undefined }); } else if (isValueDef(channelDef)) { const value = channelDef.value; const offsetMixins = offset ? { offset @@ -3749,11 +3656,11 @@ } // If channelDef is neither field def or value def, it's a condition-only def. // In such case, we will use default ref. } - if (isFunction(defaultRef)) { + if (vegaUtil.isFunction(defaultRef)) { defaultRef = defaultRef(); } if (defaultRef) { // for non-position, ref could be undefined. @@ -3792,11 +3699,11 @@ function isCustomFormatType(formatType) { return formatType && formatType !== 'number' && formatType !== 'time'; } function customFormatExpr(formatType, field, format) { - return "".concat(formatType, "(").concat(field).concat(format ? ", ".concat(JSON.stringify(format)) : '', ")"); + return `${formatType}(${field}${format ? 
`, ${stringify(format)}` : ''})`; } const BIN_RANGE_DELIMITER = ' \u2013 '; function formatSignalRef({ fieldOrDatumDef, @@ -3837,29 +3744,29 @@ return { signal: binFormatExpression(field, endField, format, formatType, config) }; } else if (format || channelDefType(fieldOrDatumDef) === 'quantitative') { return { - signal: "".concat(formatExpr(field, format)) + signal: `${formatExpr(field, format)}` }; } else { return { - signal: "isValid(".concat(field, ") ? ").concat(field, " : \"\"+").concat(field) + signal: `isValid(${field}) ? ${field} : ""+${field}` }; } } function fieldToFormat(fieldOrDatumDef, expr, normalizeStack) { if (isFieldDef(fieldOrDatumDef)) { if (normalizeStack) { - return "".concat(vgField(fieldOrDatumDef, { - expr, - suffix: 'end' - }), "-").concat(vgField(fieldOrDatumDef, { - expr, - suffix: 'start' - })); + return `${vgField(fieldOrDatumDef, { + expr, + suffix: 'end' + })}-${vgField(fieldOrDatumDef, { + expr, + suffix: 'start' + })}`; } else { return vgField(fieldOrDatumDef, { expr }); } @@ -3875,14 +3782,12 @@ expr, normalizeStack, config, field }) { - var _field; + field ?? (field = fieldToFormat(fieldOrDatumDef, expr, normalizeStack)); - field = (_field = field) !== null && _field !== void 0 ? _field : fieldToFormat(fieldOrDatumDef, expr, normalizeStack); - if (isFieldDef(fieldOrDatumDef) && isBinning(fieldOrDatumDef.bin)) { const endField = vgField(fieldOrDatumDef, { expr, binSuffix: 'end' }); @@ -3893,12 +3798,11 @@ return { signal: customFormatExpr(formatType, field, format) }; } - function guideFormat(fieldOrDatumDef, type, format, formatType, config, omitTimeFormatConfig) // axis doesn't use config.timeFormat - { + function guideFormat(fieldOrDatumDef, type, format, formatType, config, omitTimeFormatConfig) { if (isCustomFormatType(formatType)) { return undefined; // handled in encode block } if (isFieldOrDatumDefForTimeFormat(fieldOrDatumDef)) { @@ -3925,11 +3829,11 @@ * Returns number format for a fieldDef. */ function numberFormat(type, specifiedFormat, config) { // Specified format in axis/legend has higher precedence than fieldDef.format - if (isString(specifiedFormat)) { + if (vegaUtil.isString(specifiedFormat)) { return specifiedFormat; } if (type === QUANTITATIVE) { // we only apply the default if the field is quantitative @@ -3955,39 +3859,37 @@ return omitTimeFormatConfig ? undefined : config.timeFormat; } function formatExpr(field, format) { - return "format(".concat(field, ", \"").concat(format || '', "\")"); + return `format(${field}, "${format || ''}")`; } function binNumberFormatExpr(field, format, formatType, config) { - var _ref; - if (isCustomFormatType(formatType)) { return customFormatExpr(formatType, field, format); } - return formatExpr(field, (_ref = isString(format) ? format : undefined) !== null && _ref !== void 0 ? _ref : config.numberFormat); + return formatExpr(field, (vegaUtil.isString(format) ? format : undefined) ?? config.numberFormat); } function binFormatExpression(startField, endField, format, formatType, config) { const start = binNumberFormatExpr(startField, format, formatType, config); const end = binNumberFormatExpr(endField, format, formatType, config); - return "".concat(fieldValidPredicate(startField, false), " ? \"null\" : ").concat(start, " + \"").concat(BIN_RANGE_DELIMITER, "\" + ").concat(end); + return `${fieldValidPredicate(startField, false)} ? 
"null" : ${start} + "${BIN_RANGE_DELIMITER}" + ${end}`; } /** * Returns the time expression used for axis/legend labels or text mark for a temporal field */ function timeFormatExpression(field, timeUnit, format, rawTimeFormat, // should be provided only for actual text and headers, not axis/legend labels isUTCScale) { if (!timeUnit || format) { // If there is no time unit, or if user explicitly specifies format for axis/legend/text. - format = isString(format) ? format : rawTimeFormat; // only use provided timeFormat if there is no timeUnit. + format = vegaUtil.isString(format) ? format : rawTimeFormat; // only use provided timeFormat if there is no timeUnit. - return "".concat(isUTCScale ? 'utc' : 'time', "Format(").concat(field, ", '").concat(format, "')"); + return `${isUTCScale ? 'utc' : 'time'}Format(${field}, '${format}')`; } else { return formatExpression(timeUnit, field, isUTCScale); } } @@ -4018,11 +3920,11 @@ } function isSortField(sort) { return !!sort && (sort['op'] === 'count' || !!sort['field']); } function isSortArray(sort) { - return !!sort && isArray(sort); + return !!sort && vegaUtil.isArray(sort); } function isFacetMapping(f) { return 'row' in f || 'column' in f; } @@ -4039,15 +3941,15 @@ function isFacetSpec(spec) { return 'facet' in spec; } - function isConditionalSelection(c) { - return c['selection']; + function isConditionalParameter(c) { + return c['param']; } function isRepeatRef(field) { - return field && !isString(field) && 'repeat' in field; + return field && !vegaUtil.isString(field) && 'repeat' in field; } /** @@hidden */ function toFieldDefBase(fieldDef) { const { @@ -4069,120 +3971,166 @@ }; } function isSortableFieldDef(fieldDef) { return 'sort' in fieldDef; } - function getBand({ + function getBandPosition({ + fieldDef, + fieldDef2, + markDef: mark, + config + }) { + if (isFieldOrDatumDef(fieldDef) && fieldDef.bandPosition !== undefined) { + return fieldDef.bandPosition; + } + + if (isFieldDef(fieldDef)) { + const { + timeUnit, + bin + } = fieldDef; + + if (timeUnit && !fieldDef2) { + return isRectBasedMark(mark.type) ? 0 : getMarkConfig('timeUnitBandPosition', mark, config); + } else if (isBinning(bin)) { + return 0.5; + } + } + + return undefined; + } + function getBandSize({ channel, fieldDef, fieldDef2, markDef: mark, - stack, config, - isMidPoint + scaleType, + useVlSizeChannel }) { - if (isFieldOrDatumDef(fieldDef) && fieldDef.band !== undefined) { - return fieldDef.band; + const sizeChannel = getSizeChannel$1(channel); + const size = getMarkPropOrConfig(useVlSizeChannel ? 'size' : sizeChannel, mark, config, { + vgChannel: sizeChannel + }); + + if (size !== undefined) { + return size; } if (isFieldDef(fieldDef)) { const { timeUnit, bin } = fieldDef; if (timeUnit && !fieldDef2) { - if (isMidPoint) { - return getMarkConfig('timeUnitBandPosition', mark, config); + return { + band: getMarkConfig('timeUnitBandSize', mark, config) + }; + } else if (isBinning(bin) && !hasDiscreteDomain(scaleType)) { + return { + band: 1 + }; + } + } + + if (isRectBasedMark(mark.type)) { + var _config$mark$type3; + + if (scaleType) { + if (hasDiscreteDomain(scaleType)) { + var _config$mark$type; + + return ((_config$mark$type = config[mark.type]) === null || _config$mark$type === void 0 ? void 0 : _config$mark$type.discreteBandSize) || { + band: 1 + }; } else { - return isRectBasedMark(mark.type) ? getMarkConfig('timeUnitBand', mark, config) : 0; + var _config$mark$type2; + + return (_config$mark$type2 = config[mark.type]) === null || _config$mark$type2 === void 0 ? 
void 0 : _config$mark$type2.continuousBandSize; } - } else if (isBinning(bin)) { - return isRectBasedMark(mark.type) && !isMidPoint ? 1 : 0.5; } - } - if ((stack === null || stack === void 0 ? void 0 : stack.fieldChannel) === channel && isMidPoint) { - return 0.5; + return (_config$mark$type3 = config[mark.type]) === null || _config$mark$type3 === void 0 ? void 0 : _config$mark$type3.discreteBandSize; } return undefined; } - function hasBand(channel, fieldDef, fieldDef2, stack, markDef, config) { + function hasBandEnd(fieldDef, fieldDef2, markDef, config) { if (isBinning(fieldDef.bin) || fieldDef.timeUnit && isTypedFieldDef(fieldDef) && fieldDef.type === 'temporal') { - return !!getBand({ - channel, + // Need to check bandPosition because non-rect marks (e.g., point) with timeUnit + // doesn't have to use bandEnd if there is no bandPosition. + return getBandPosition({ fieldDef, fieldDef2, - stack, markDef, config - }); + }) !== undefined; } return false; } /** * Field definition of a mark property, which can contain a legend. */ function isConditionalDef(channelDef) { - return !!channelDef && 'condition' in channelDef; + return channelDef && 'condition' in channelDef; } /** * Return if a channelDef is a ConditionalValueDef with ConditionFieldDef */ function hasConditionalFieldDef(channelDef) { const condition = channelDef && channelDef['condition']; - return !!condition && !isArray(condition) && isFieldDef(condition); + return !!condition && !vegaUtil.isArray(condition) && isFieldDef(condition); } function hasConditionalFieldOrDatumDef(channelDef) { const condition = channelDef && channelDef['condition']; - return !!condition && !isArray(condition) && isFieldOrDatumDef(condition); + return !!condition && !vegaUtil.isArray(condition) && isFieldOrDatumDef(condition); } function hasConditionalValueDef(channelDef) { const condition = channelDef && channelDef['condition']; - return !!condition && (isArray(condition) || isValueDef(condition)); + return !!condition && (vegaUtil.isArray(condition) || isValueDef(condition)); } function isFieldDef(channelDef) { // TODO: we can't use field in channelDef here as it's somehow failing runtime test - return !!channelDef && (!!channelDef['field'] || channelDef['aggregate'] === 'count'); + return channelDef && (!!channelDef['field'] || channelDef['aggregate'] === 'count'); } function channelDefType(channelDef) { return channelDef && channelDef['type']; } function isDatumDef(channelDef) { - return !!channelDef && 'datum' in channelDef; + return channelDef && 'datum' in channelDef; } function isContinuousFieldOrDatumDef(cd) { // TODO: make datum support DateTime object - return isTypedFieldDef(cd) && isContinuous(cd) || isNumericDataDef(cd); + return isTypedFieldDef(cd) && !isDiscrete(cd) || isNumericDataDef(cd); } function isNumericDataDef(cd) { - return isDatumDef(cd) && isNumber(cd.datum); + return isDatumDef(cd) && vegaUtil.isNumber(cd.datum); } function isFieldOrDatumDef(channelDef) { return isFieldDef(channelDef) || isDatumDef(channelDef); } function isTypedFieldDef(channelDef) { - return !!channelDef && ('field' in channelDef || channelDef['aggregate'] === 'count') && 'type' in channelDef; + return channelDef && ('field' in channelDef || channelDef['aggregate'] === 'count') && 'type' in channelDef; } function isValueDef(channelDef) { return channelDef && 'value' in channelDef && 'value' in channelDef; } function isScaleFieldDef(channelDef) { - return !!channelDef && ('scale' in channelDef || 'sort' in channelDef); + return channelDef && ('scale' in 
channelDef || 'sort' in channelDef); } function isPositionFieldOrDatumDef(channelDef) { return channelDef && ('axis' in channelDef || 'stack' in channelDef || 'impute' in channelDef); } function isMarkPropFieldOrDatumDef(channelDef) { - return !!channelDef && 'legend' in channelDef; + return channelDef && 'legend' in channelDef; } function isStringFieldOrDatumDef(channelDef) { - return !!channelDef && ('format' in channelDef || 'formatType' in channelDef); + return channelDef && ('format' in channelDef || 'formatType' in channelDef); } function toStringFieldDef(fieldDef) { // omit properties that don't exist in string field defs return omit(fieldDef, ['legend', 'axis', 'header', 'scale']); } @@ -4215,44 +4163,40 @@ aggregate, timeUnit } = fieldDef; if (isBinning(bin)) { - var _opt$binSuffix, _opt$suffix; - fn = binToString(bin); - suffix = ((_opt$binSuffix = opt.binSuffix) !== null && _opt$binSuffix !== void 0 ? _opt$binSuffix : '') + ((_opt$suffix = opt.suffix) !== null && _opt$suffix !== void 0 ? _opt$suffix : ''); + suffix = (opt.binSuffix ?? '') + (opt.suffix ?? ''); } else if (aggregate) { if (isArgmaxDef(aggregate)) { - argAccessor = "[\"".concat(field, "\"]"); - field = "argmax_".concat(aggregate.argmax); + argAccessor = `["${field}"]`; + field = `argmax_${aggregate.argmax}`; } else if (isArgminDef(aggregate)) { - argAccessor = "[\"".concat(field, "\"]"); - field = "argmin_".concat(aggregate.argmin); + argAccessor = `["${field}"]`; + field = `argmin_${aggregate.argmin}`; } else { fn = String(aggregate); } } else if (timeUnit) { - var _opt$suffix2; - fn = timeUnitToString(timeUnit); - suffix = (!contains(['range', 'mid'], opt.binSuffix) && opt.binSuffix || '') + ((_opt$suffix2 = opt.suffix) !== null && _opt$suffix2 !== void 0 ? _opt$suffix2 : ''); + suffix = (!['range', 'mid'].includes(opt.binSuffix) && opt.binSuffix || '') + (opt.suffix ?? ''); } } } if (fn) { - field = field ? "".concat(fn, "_").concat(field) : fn; + field = field ? `${fn}_${field}` : fn; } } if (suffix) { - field = "".concat(field, "_").concat(suffix); + field = `${field}_${suffix}`; } if (prefix) { - field = "".concat(prefix, "_").concat(field); + field = `${prefix}_${field}`; } if (opt.forAs) { return removePathFromField(field); } else if (opt.expr) { @@ -4277,12 +4221,14 @@ return false; } throw new Error(invalidFieldType(def.type)); } - function isContinuous(fieldDef) { - return !isDiscrete(fieldDef); + function isDiscretizing(def) { + var _def$scale; + + return isScaleFieldDef(def) && isContinuousToDiscrete((_def$scale = def.scale) === null || _def$scale === void 0 ? void 0 : _def$scale.type); } function isCount(fieldDef) { return fieldDef.aggregate === 'count'; } function verbalTitleFormatter(fieldDef, config) { @@ -4294,26 +4240,26 @@ } = fieldDef; if (aggregate === 'count') { return config.countTitle; } else if (isBinning(bin)) { - return "".concat(field, " (binned)"); + return `${field} (binned)`; } else if (timeUnit) { var _normalizeTimeUnit; const unit = (_normalizeTimeUnit = normalizeTimeUnit(timeUnit)) === null || _normalizeTimeUnit === void 0 ? 
void 0 : _normalizeTimeUnit.unit; if (unit) { - return "".concat(field, " (").concat(getTimeUnitParts(unit).join('-'), ")"); + return `${field} (${getTimeUnitParts(unit).join('-')})`; } } else if (aggregate) { if (isArgmaxDef(aggregate)) { - return "".concat(field, " for max ").concat(aggregate.argmax); + return `${field} for max ${aggregate.argmax}`; } else if (isArgminDef(aggregate)) { - return "".concat(field, " for min ").concat(aggregate.argmin); + return `${field} for min ${aggregate.argmin}`; } else { - return "".concat(titleCase(aggregate), " of ").concat(field); + return `${titleCase(aggregate)} of ${field}`; } } return field; } @@ -4324,20 +4270,20 @@ timeUnit, field } = fieldDef; if (isArgmaxDef(aggregate)) { - return "".concat(field, " for argmax(").concat(aggregate.argmax, ")"); + return `${field} for argmax(${aggregate.argmax})`; } else if (isArgminDef(aggregate)) { - return "".concat(field, " for argmin(").concat(aggregate.argmin, ")"); + return `${field} for argmin(${aggregate.argmin})`; } const timeUnitParams = normalizeTimeUnit(timeUnit); const fn = aggregate || (timeUnitParams === null || timeUnitParams === void 0 ? void 0 : timeUnitParams.unit) || (timeUnitParams === null || timeUnitParams === void 0 ? void 0 : timeUnitParams.maxbins) && 'timeunit' || isBinning(bin) && 'bin'; if (fn) { - return fn.toUpperCase() + '(' + field + ')'; + return `${fn.toUpperCase()}(${field})`; } else { return field; } } const defaultTitleFormatter = (fieldDef, config) => { @@ -4375,13 +4321,11 @@ const def = includeDefault ? defaultTitle(fieldDef, config) : undefined; if (allowDisabling) { return getFirstDefined(guideTitle, fieldDef.title, def); } else { - var _ref; - - return (_ref = guideTitle !== null && guideTitle !== void 0 ? guideTitle : fieldDef.title) !== null && _ref !== void 0 ? _ref : def; + return guideTitle ?? fieldDef.title ?? def; } } function getGuide(fieldDef) { if (isPositionFieldOrDatumDef(fieldDef) && fieldDef.axis) { return fieldDef.axis; @@ -4405,24 +4349,22 @@ return { format, formatType }; } else { - var _getGuide2; - - const guide = (_getGuide2 = getGuide(fieldDef)) !== null && _getGuide2 !== void 0 ? _getGuide2 : {}; + const guide = getGuide(fieldDef) ?? {}; const { format, formatType } = guide; return { format, formatType }; } } - function defaultType(fieldDef, channel) { + function defaultType$2(fieldDef, channel) { var _fieldDef$scale; switch (channel) { case 'latitude': case 'longitude': @@ -4437,11 +4379,11 @@ case 'order': return 'ordinal'; } - if (isSortableFieldDef(fieldDef) && isArray(fieldDef.sort)) { + if (isSortableFieldDef(fieldDef) && vegaUtil.isArray(fieldDef.sort)) { return 'ordinal'; } const { aggregate, @@ -4455,11 +4397,11 @@ if (bin || aggregate && !isArgmaxDef(aggregate) && !isArgminDef(aggregate)) { return 'quantitative'; } - if (isScaleFieldDef(fieldDef) && ((_fieldDef$scale = fieldDef.scale) === null || _fieldDef$scale === void 0 ? void 0 : _fieldDef$scale.type)) { + if (isScaleFieldDef(fieldDef) && (_fieldDef$scale = fieldDef.scale) !== null && _fieldDef$scale !== void 0 && _fieldDef$scale.type) { switch (SCALE_CATEGORY_INDEX[fieldDef.scale.type]) { case 'numeric': case 'discretizing': return 'quantitative'; @@ -4496,12 +4438,12 @@ /** * Convert type to full, lowercase type, or augment the fieldDef with a default type if missing. */ function initChannelDef(channelDef, channel, config, opt = {}) { - if (isString(channelDef) || isNumber(channelDef) || isBoolean(channelDef)) { - const primitiveType = isString(channelDef) ? 
'string' : isNumber(channelDef) ? 'number' : 'boolean'; + if (vegaUtil.isString(channelDef) || vegaUtil.isNumber(channelDef) || vegaUtil.isBoolean(channelDef)) { + const primitiveType = vegaUtil.isString(channelDef) ? 'string' : vegaUtil.isNumber(channelDef) ? 'number' : 'boolean'; warn(primitiveChannelDef(channel, primitiveType, channelDef)); return { value: channelDef }; } // If a fieldDef contains a field, we need type. @@ -4564,11 +4506,11 @@ } const { datum } = datumDef; - type = isNumber(datum) ? 'quantitative' : isString(datum) ? 'nominal' : isDateTime(datum) ? 'temporal' : undefined; + type = vegaUtil.isNumber(datum) ? 'quantitative' : vegaUtil.isString(datum) ? 'nominal' : isDateTime(datum) ? 'temporal' : undefined; return { ...datumDef, type }; } @@ -4593,11 +4535,11 @@ if (timeUnit) { fieldDef.timeUnit = normalizeTimeUnit(timeUnit); } if (field) { - fieldDef.field = "".concat(field); + fieldDef.field = `${field}`; } // Normalize bin if (isBinning(bin)) { fieldDef.bin = normalizeBin(bin, channel); @@ -4625,11 +4567,11 @@ fieldDef.type = 'quantitative'; } } } else if (!isSecondaryRangeChannel(channel)) { // If type is empty / invalid, then augment with default type - const newType = defaultType(fieldDef, channel); + const newType = defaultType$2(fieldDef, channel); fieldDef['type'] = newType; } if (isTypedFieldDef(fieldDef)) { const { @@ -4640,11 +4582,11 @@ if (compatible === false) { warn(warning); } } - if (isSortableFieldDef(fieldDef) && isString(fieldDef.sort)) { + if (isSortableFieldDef(fieldDef) && vegaUtil.isString(fieldDef.sort)) { const { sort } = fieldDef; if (isSortByChannel(sort)) { @@ -4669,29 +4611,32 @@ if (isFacetFieldDef(fieldDef)) { const { header } = fieldDef; - const { - orient, - ...rest - } = header; - if (orient) { - return { ...fieldDef, - header: { ...rest, - labelOrient: header.labelOrient || orient, - titleOrient: header.titleOrient || orient - } - }; + if (header) { + const { + orient, + ...rest + } = header; + + if (orient) { + return { ...fieldDef, + header: { ...rest, + labelOrient: header.labelOrient || orient, + titleOrient: header.titleOrient || orient + } + }; + } } } return fieldDef; } function normalizeBin(bin, channel) { - if (isBoolean(bin)) { + if (vegaUtil.isBoolean(bin)) { return { maxbins: autoMaxBins(channel) }; } else if (bin === 'binned') { return { @@ -4712,33 +4657,33 @@ const type = fieldDef.type; if (type === 'geojson' && channel !== 'shape') { return { compatible: false, - warning: "Channel ".concat(channel, " should not be used with a geojson data.") + warning: `Channel ${channel} should not be used with a geojson data.` }; } switch (channel) { case ROW: case COLUMN: case FACET: - if (isContinuous(fieldDef)) { + if (!isDiscrete(fieldDef)) { return { compatible: false, - warning: facetChannelShouldBeDiscrete(channel) + warning: channelShouldBeDiscrete(channel) }; } return COMPATIBLE; case X: case Y: case COLOR: case FILL: case STROKE: - case TEXT: + case TEXT$1: case DETAIL: case KEY: case TOOLTIP: case HREF: case URL: @@ -4753,11 +4698,11 @@ case LATITUDE: case LATITUDE2: if (type !== QUANTITATIVE) { return { compatible: false, - warning: "Channel ".concat(channel, " should be used with a quantitative field only, not ").concat(fieldDef.type, " field.") + warning: `Channel ${channel} should be used with a quantitative field only, not ${fieldDef.type} field.` }; } return COMPATIBLE; @@ -4771,41 +4716,32 @@ case X2: case Y2: if (type === 'nominal' && !fieldDef['sort']) { return { compatible: false, - warning: "Channel 
".concat(channel, " should not be used with an unsorted discrete field.") + warning: `Channel ${channel} should not be used with an unsorted discrete field.` }; } return COMPATIBLE; + case SHAPE: case STROKEDASH: - if (!contains(['ordinal', 'nominal'], fieldDef.type)) { + if (!isDiscrete(fieldDef) && !isDiscretizing(fieldDef)) { return { compatible: false, - warning: 'StrokeDash channel should be used with only discrete data.' + warning: channelShouldBeDiscreteOrDiscretizing(channel) }; } return COMPATIBLE; - case SHAPE: - if (!contains(['ordinal', 'nominal', 'geojson'], fieldDef.type)) { - return { - compatible: false, - warning: 'Shape channel should be used with only either discrete or geojson data.' - }; - } - - return COMPATIBLE; - case ORDER: if (fieldDef.type === 'nominal' && !('sort' in fieldDef)) { return { compatible: false, - warning: "Channel order is inappropriate for nominal field, which has no inherent order." + warning: `Channel order is inappropriate for nominal field, which has no inherent order.` }; } return COMPATIBLE; } @@ -4850,31 +4786,31 @@ } else if (isSignalRef(v)) { expr = v.signal; } else if (isDateTime(v)) { isTime = true; expr = dateTimeToExpr(v); - } else if (isString(v) || isNumber(v)) { + } else if (vegaUtil.isString(v) || vegaUtil.isNumber(v)) { if (isTime) { - expr = "datetime(".concat(JSON.stringify(v), ")"); + expr = `datetime(${stringify(v)})`; if (isLocalSingleTimeUnit(unit)) { // for single timeUnit, we will use dateTimeToExpr to convert number/string to match the timeUnit - if (isNumber(v) && v < 10000 || isString(v) && isNaN(Date.parse(v))) { + if (vegaUtil.isNumber(v) && v < 10000 || vegaUtil.isString(v) && isNaN(Date.parse(v))) { expr = dateTimeToExpr({ [unit]: v }); } } } } if (expr) { - return wrapTime && isTime ? "time(".concat(expr, ")") : expr; + return wrapTime && isTime ? `time(${expr})` : expr; } // number or boolean or normal string - return undefinedIfExprNotRequired ? undefined : JSON.stringify(v); + return undefinedIfExprNotRequired ? undefined : stringify(v); } /** * Standardize value array -- convert each value to Vega expression if applicable */ @@ -4909,298 +4845,300 @@ return false; } // We need the range only when the user explicitly forces a binned field to be use discrete scale. In this case, bin range is used in axis and legend labels. // We could check whether the axis or legend exists (not disabled) but that seems overkill. - return isScaleChannel(channel) && contains(['ordinal', 'nominal'], fieldDef.type); + return isScaleChannel(channel) && ['ordinal', 'nominal'].includes(fieldDef.type); } - function extractTitleConfig(titleConfig) { - const { - // These are non-mark title config that need to be hardcoded - anchor, - frame, - offset, - orient, - // color needs to be redirect to fill - color, - // subtitle properties - subtitleColor, - subtitleFont, - subtitleFontSize, - subtitleFontStyle, - subtitleFontWeight, - subtitleLineHeight, - subtitlePadding, - // The rest are mark config. - ...rest - } = titleConfig; - const titleMarkConfig = { ...rest, - ...(color ? { - fill: color - } : {}) - }; // These are non-mark title config that need to be hardcoded - - const nonMark = { ...(anchor ? { - anchor - } : {}), - ...(frame ? { - frame - } : {}), - ...(offset ? { - offset - } : {}), - ...(orient ? { - orient - } : {}) - }; // subtitle part can stay in config.title since header titles do not use subtitle - - const subtitle = { ...(subtitleColor ? { - subtitleColor - } : {}), - ...(subtitleFont ? 
{ - subtitleFont - } : {}), - ...(subtitleFontSize ? { - subtitleFontSize - } : {}), - ...(subtitleFontStyle ? { - subtitleFontStyle - } : {}), - ...(subtitleFontWeight ? { - subtitleFontWeight - } : {}), - ...(subtitleLineHeight ? { - subtitleLineHeight - } : {}), - ...(subtitlePadding ? { - subtitlePadding - } : {}) - }; - const subtitleMarkConfig = pick(titleMarkConfig, ['align', 'baseline', 'dx', 'dy', 'limit']); - return { - titleMarkConfig, - subtitleMarkConfig, - nonMark, - subtitle - }; - } - function isText(v) { - return isString(v) || isArray(v) && isString(v[0]); - } - - function signalOrValueRefWithCondition(val) { - const condition = isArray(val.condition) ? val.condition.map(conditionalSignalRefOrValue) : conditionalSignalRefOrValue(val.condition); - return { ...signalRefOrValue(val), - condition - }; - } - function signalRefOrValue(value) { - if (isExprRef(value)) { - const { - expr, - ...rest - } = value; - return { - signal: expr, - ...rest - }; + const CONDITIONAL_AXIS_PROP_INDEX = { + labelAlign: { + part: 'labels', + vgProp: 'align' + }, + labelBaseline: { + part: 'labels', + vgProp: 'baseline' + }, + labelColor: { + part: 'labels', + vgProp: 'fill' + }, + labelFont: { + part: 'labels', + vgProp: 'font' + }, + labelFontSize: { + part: 'labels', + vgProp: 'fontSize' + }, + labelFontStyle: { + part: 'labels', + vgProp: 'fontStyle' + }, + labelFontWeight: { + part: 'labels', + vgProp: 'fontWeight' + }, + labelOpacity: { + part: 'labels', + vgProp: 'opacity' + }, + labelOffset: null, + labelPadding: null, + // There is no fixed vgProp for tickSize, need to use signal. + gridColor: { + part: 'grid', + vgProp: 'stroke' + }, + gridDash: { + part: 'grid', + vgProp: 'strokeDash' + }, + gridDashOffset: { + part: 'grid', + vgProp: 'strokeDashOffset' + }, + gridOpacity: { + part: 'grid', + vgProp: 'opacity' + }, + gridWidth: { + part: 'grid', + vgProp: 'strokeWidth' + }, + tickColor: { + part: 'ticks', + vgProp: 'stroke' + }, + tickDash: { + part: 'ticks', + vgProp: 'strokeDash' + }, + tickDashOffset: { + part: 'ticks', + vgProp: 'strokeDashOffset' + }, + tickOpacity: { + part: 'ticks', + vgProp: 'opacity' + }, + tickSize: null, + // There is no fixed vgProp for tickSize, need to use signal. + tickWidth: { + part: 'ticks', + vgProp: 'strokeWidth' } - - return value; + }; + function isConditionalAxisValue(v) { + return v && v['condition']; } - function conditionalSignalRefOrValue(value) { - if (isExprRef(value)) { - const { - expr, - ...rest - } = value; - return { - signal: expr, - ...rest - }; - } - - return value; - } - function signalOrValueRef(value) { - if (isExprRef(value)) { - const { - expr, - ...rest - } = value; - return { - signal: expr, - ...rest - }; - } - - if (isSignalRef(value)) { - return value; - } - - return value !== undefined ? { - value - } : undefined; - } - function exprFromValueOrSignalRef(ref) { - if (isSignalRef(ref)) { - return ref.signal; - } - - return $(ref.value); - } - function signalOrStringValue(v) { - if (isSignalRef(v)) { - return v.signal; - } - - return v == null ? null : $(v); - } - function applyMarkConfig(e, model, propsList) { - for (const property of propsList) { - const value = getMarkConfig(property, model.markDef, model.config); - - if (value !== undefined) { - e[property] = signalOrValueRef(value); - } - } - - return e; - } - function getStyles(mark) { - var _mark$style; - - return [].concat(mark.type, (_mark$style = mark.style) !== null && _mark$style !== void 0 ? 
_mark$style : []); - } - function getMarkPropOrConfig(channel, mark, config, opt = {}) { - const { - vgChannel, - ignoreVgConfig - } = opt; - - if (vgChannel && mark[vgChannel] !== undefined) { - return mark[vgChannel]; - } else if (mark[channel] !== undefined) { - return mark[channel]; - } else if (ignoreVgConfig && (!vgChannel || vgChannel === channel)) { - return undefined; - } - - return getMarkConfig(channel, mark, config, opt); - } + const AXIS_PARTS = ['domain', 'grid', 'labels', 'ticks', 'title']; /** - * Return property value from style or mark specific config property if exists. - * Otherwise, return general mark specific config. + * A dictionary listing whether a certain axis property is applicable for only main axes or only grid axes. */ - function getMarkConfig(channel, mark, config, { - vgChannel - } = {}) { - return getFirstDefined( // style config has highest precedence - vgChannel ? getMarkStyleConfig(channel, mark, config.style) : undefined, getMarkStyleConfig(channel, mark, config.style), // then mark-specific config - vgChannel ? config[mark.type][vgChannel] : undefined, config[mark.type][channel], // Need to cast because MarkDef doesn't perfectly match with AnyMarkConfig, but if the type isn't available, we'll get nothing here, which is fine - // If there is vgChannel, skip vl channel. - // For example, vl size for text is vg fontSize, but config.mark.size is only for point size. - vgChannel ? config.mark[vgChannel] : config.mark[channel] // Need to cast for the same reason as above - ); - } - function getMarkStyleConfig(prop, mark, styleConfigIndex) { - return getStyleConfig(prop, getStyles(mark), styleConfigIndex); - } - function getStyleConfig(p, styles, styleConfigIndex) { - styles = array(styles); - let value; + const AXIS_PROPERTY_TYPE = { + grid: 'grid', + gridCap: 'grid', + gridColor: 'grid', + gridDash: 'grid', + gridDashOffset: 'grid', + gridOpacity: 'grid', + gridScale: 'grid', + gridWidth: 'grid', + orient: 'main', + bandPosition: 'both', + // Need to be applied to grid axis too, so the grid will align with ticks. + aria: 'main', + description: 'main', + domain: 'main', + domainCap: 'main', + domainColor: 'main', + domainDash: 'main', + domainDashOffset: 'main', + domainOpacity: 'main', + domainWidth: 'main', + format: 'main', + formatType: 'main', + labelAlign: 'main', + labelAngle: 'main', + labelBaseline: 'main', + labelBound: 'main', + labelColor: 'main', + labelFlush: 'main', + labelFlushOffset: 'main', + labelFont: 'main', + labelFontSize: 'main', + labelFontStyle: 'main', + labelFontWeight: 'main', + labelLimit: 'main', + labelLineHeight: 'main', + labelOffset: 'main', + labelOpacity: 'main', + labelOverlap: 'main', + labelPadding: 'main', + labels: 'main', + labelSeparation: 'main', + maxExtent: 'main', + minExtent: 'main', + offset: 'both', + position: 'main', + tickCap: 'main', + tickColor: 'main', + tickDash: 'main', + tickDashOffset: 'main', + tickMinStep: 'both', + tickOffset: 'both', + // Need to be applied to grid axis too, so the grid will align with ticks. + tickOpacity: 'main', + tickRound: 'both', + // Apply rounding to grid and ticks so they are aligned. 
+ ticks: 'main', + tickSize: 'main', + tickWidth: 'both', + title: 'main', + titleAlign: 'main', + titleAnchor: 'main', + titleAngle: 'main', + titleBaseline: 'main', + titleColor: 'main', + titleFont: 'main', + titleFontSize: 'main', + titleFontStyle: 'main', + titleFontWeight: 'main', + titleLimit: 'main', + titleLineHeight: 'main', + titleOpacity: 'main', + titlePadding: 'main', + titleX: 'main', + titleY: 'main', + encode: 'both', + // we hide this in Vega-Lite + scale: 'both', + tickBand: 'both', + tickCount: 'both', + tickExtra: 'both', + translate: 'both', + values: 'both', + zindex: 'both' // this is actually set afterward, so it doesn't matter - for (const style of styles) { - const styleConfig = styleConfigIndex[style]; + }; + const COMMON_AXIS_PROPERTIES_INDEX = { + orient: 1, + // other things can depend on orient + aria: 1, + bandPosition: 1, + description: 1, + domain: 1, + domainCap: 1, + domainColor: 1, + domainDash: 1, + domainDashOffset: 1, + domainOpacity: 1, + domainWidth: 1, + format: 1, + formatType: 1, + grid: 1, + gridCap: 1, + gridColor: 1, + gridDash: 1, + gridDashOffset: 1, + gridOpacity: 1, + gridWidth: 1, + labelAlign: 1, + labelAngle: 1, + labelBaseline: 1, + labelBound: 1, + labelColor: 1, + labelFlush: 1, + labelFlushOffset: 1, + labelFont: 1, + labelFontSize: 1, + labelFontStyle: 1, + labelFontWeight: 1, + labelLimit: 1, + labelLineHeight: 1, + labelOffset: 1, + labelOpacity: 1, + labelOverlap: 1, + labelPadding: 1, + labels: 1, + labelSeparation: 1, + maxExtent: 1, + minExtent: 1, + offset: 1, + position: 1, + tickBand: 1, + tickCap: 1, + tickColor: 1, + tickCount: 1, + tickDash: 1, + tickDashOffset: 1, + tickExtra: 1, + tickMinStep: 1, + tickOffset: 1, + tickOpacity: 1, + tickRound: 1, + ticks: 1, + tickSize: 1, + tickWidth: 1, + title: 1, + titleAlign: 1, + titleAnchor: 1, + titleAngle: 1, + titleBaseline: 1, + titleColor: 1, + titleFont: 1, + titleFontSize: 1, + titleFontStyle: 1, + titleFontWeight: 1, + titleLimit: 1, + titleLineHeight: 1, + titleOpacity: 1, + titlePadding: 1, + titleX: 1, + titleY: 1, + translate: 1, + values: 1, + zindex: 1 + }; + const AXIS_PROPERTIES_INDEX = { ...COMMON_AXIS_PROPERTIES_INDEX, + style: 1, + labelExpr: 1, + encoding: 1 + }; + function isAxisProperty(prop) { + return !!AXIS_PROPERTIES_INDEX[prop]; + } // Export for dependent projects + const AXIS_CONFIGS_INDEX = { + axis: 1, + axisBand: 1, + axisBottom: 1, + axisDiscrete: 1, + axisLeft: 1, + axisPoint: 1, + axisQuantitative: 1, + axisRight: 1, + axisTemporal: 1, + axisTop: 1, + axisX: 1, + axisXBand: 1, + axisXDiscrete: 1, + axisXPoint: 1, + axisXQuantitative: 1, + axisXTemporal: 1, + axisY: 1, + axisYBand: 1, + axisYDiscrete: 1, + axisYPoint: 1, + axisYQuantitative: 1, + axisYTemporal: 1 + }; + const AXIS_CONFIGS = keys(AXIS_CONFIGS_INDEX); - if (styleConfig && styleConfig[p] !== undefined) { - value = styleConfig[p]; - } - } - - return value; - } /** - * Return Vega sort parameters (tuple of field and order). - */ - - function sortParams(orderDef, fieldRefOption) { - return array(orderDef).reduce((s, orderChannelDef) => { - var _orderChannelDef$sort; - - s.field.push(vgField(orderChannelDef, fieldRefOption)); - s.order.push((_orderChannelDef$sort = orderChannelDef.sort) !== null && _orderChannelDef$sort !== void 0 ? 
_orderChannelDef$sort : 'ascending'); - return s; - }, { - field: [], - order: [] - }); - } - function mergeTitleFieldDefs(f1, f2) { - const merged = [...f1]; - f2.forEach(fdToMerge => { - for (const fieldDef1 of merged) { - // If already exists, no need to append to merged array - if (deepEqual(fieldDef1, fdToMerge)) { - return; - } - } - - merged.push(fdToMerge); - }); - return merged; - } - function mergeTitle(title1, title2) { - if (deepEqual(title1, title2) || !title2) { - // if titles are the same or title2 is falsy - return title1; - } else if (!title1) { - // if title1 is falsy - return title2; - } else { - return [...array(title1), ...array(title2)].join(', '); - } - } - function mergeTitleComponent(v1, v2) { - const v1Val = v1.value; - const v2Val = v2.value; - - if (v1Val == null || v2Val === null) { - return { - explicit: v1.explicit, - value: null - }; - } else if ((isText(v1Val) || isSignalRef(v1Val)) && (isText(v2Val) || isSignalRef(v2Val))) { - return { - explicit: v1.explicit, - value: mergeTitle(v1Val, v2Val) - }; - } else if (isText(v1Val) || isSignalRef(v1Val)) { - return { - explicit: v1.explicit, - value: v1Val - }; - } else if (isText(v2Val) || isSignalRef(v2Val)) { - return { - explicit: v1.explicit, - value: v2Val - }; - } else if (!isText(v1Val) && !isSignalRef(v1Val) && !isText(v2Val) && !isSignalRef(v2Val)) { - return { - explicit: v1.explicit, - value: mergeTitleFieldDefs(v1Val, v2Val) - }; - } - /* istanbul ignore next: Condition should not happen -- only for warning in development. */ - - - throw new Error('It should never reach here'); - } - - /** * Base interface for a unit (single-view) specification. */ /** * A unit specification without any shortcut/expansion syntax. @@ -5235,25 +5173,25 @@ function channelHasField(encoding, channel) { const channelDef = encoding && encoding[channel]; if (channelDef) { - if (isArray(channelDef)) { + if (vegaUtil.isArray(channelDef)) { return some(channelDef, fieldDef => !!fieldDef.field); } else { return isFieldDef(channelDef) || hasConditionalFieldDef(channelDef); } } return false; } - function isAggregate(encoding) { + function isAggregate$1(encoding) { return some(CHANNELS, channel => { if (channelHasField(encoding, channel)) { const channelDef = encoding[channel]; - if (isArray(channelDef)) { + if (vegaUtil.isArray(channelDef)) { return some(channelDef, fieldDef => !!fieldDef.aggregate); } else { const fieldDef = getFieldDef(channelDef); return fieldDef && !!fieldDef.aggregate; } @@ -5279,11 +5217,11 @@ ...remaining } = channelDef; if (aggOp || timeUnit || bin) { const guide = getGuide(channelDef); - const isTitleDefined = guide && guide.title; + const isTitleDefined = guide === null || guide === void 0 ? void 0 : guide.title; let newField = vgField(channelDef, { forAs: true }); const newFieldDef = { // Only add title if it doesn't exist ...(isTitleDefined ? 
[] : { @@ -5305,20 +5243,20 @@ op: 'argmax', field: aggOp.argmax }, { forAs: true }); - newFieldDef.field = "".concat(newField, ".").concat(field); + newFieldDef.field = `${newField}.${field}`; } else if (isArgminDef(aggOp)) { op = 'argmin'; newField = vgField({ op: 'argmin', field: aggOp.argmin }, { forAs: true }); - newFieldDef.field = "".concat(newField, ".").concat(field); + newFieldDef.field = `${newField}.${field}`; } else if (aggOp !== 'boxplot' && aggOp !== 'errorbar' && aggOp !== 'errorband') { op = aggOp; } if (op) { @@ -5354,13 +5292,13 @@ } // Create accompanying 'x2' or 'y2' field if channel is 'x' or 'y' respectively if (isXorY(channel)) { const secondaryChannel = { - field: newField + '_end' + field: `${newField}_end` }; - encoding[channel + '2'] = secondaryChannel; + encoding[`${channel}2`] = secondaryChannel; } newFieldDef.bin = 'binned'; if (!isSecondaryRangeChannel(channel)) { @@ -5374,11 +5312,11 @@ }); // define the format type for later compilation const formatType = isTypedFieldDef(channelDef) && channelDef.type !== TEMPORAL && 'time'; if (formatType) { - if (channel === TEXT || channel === TOOLTIP) { + if (channel === TEXT$1 || channel === TOOLTIP) { newFieldDef['formatType'] = formatType; } else if (isNonPositionScaleChannel(channel)) { newFieldDef['legend'] = { formatType, ...newFieldDef['legend'] @@ -5453,11 +5391,11 @@ if (channel === SIZE && mark === 'line') { const fieldDef = getFieldDef(encoding[channel]); - if (fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.aggregate) { + if (fieldDef !== null && fieldDef !== void 0 && fieldDef.aggregate) { warn(LINE_WITH_VARYING_SIZE); return normalizedEncoding; } } // Drop color if either fill or stroke is specified @@ -5468,14 +5406,14 @@ stroke: 'stroke' in encoding })); return normalizedEncoding; } - if (channel === DETAIL || channel === ORDER && !isArray(channelDef) && !isValueDef(channelDef) || channel === TOOLTIP && isArray(channelDef)) { + if (channel === DETAIL || channel === ORDER && !vegaUtil.isArray(channelDef) && !isValueDef(channelDef) || channel === TOOLTIP && vegaUtil.isArray(channelDef)) { if (channelDef) { // Array of fieldDefs for detail channel (or production rule) - normalizedEncoding[channel] = array(channelDef).reduce((defs, fieldDef) => { + normalizedEncoding[channel] = vegaUtil.array(channelDef).reduce((defs, fieldDef) => { if (!isFieldDef(fieldDef)) { warn(emptyFieldDef(fieldDef, channel)); } else { defs.push(initFieldDef(fieldDef, channel)); } @@ -5518,11 +5456,11 @@ const arr = []; for (const channel of keys(encoding)) { if (channelHasField(encoding, channel)) { const channelDef = encoding[channel]; - const channelDefArray = array(channelDef); + const channelDefArray = vegaUtil.array(channelDef); for (const def of channelDefArray) { if (isFieldDef(def)) { arr.push(def); } else if (hasConditionalFieldDef(def)) { @@ -5540,11 +5478,11 @@ } for (const channel of keys(mapping)) { const el = mapping[channel]; - if (isArray(el)) { + if (vegaUtil.isArray(el)) { for (const channelDef of el) { f.call(thisArg, channelDef, channel); } } else { f.call(thisArg, el, channel); @@ -5557,11 +5495,11 @@ } return keys(mapping).reduce((r, channel) => { const map = mapping[channel]; - if (isArray(map)) { + if (vegaUtil.isArray(map)) { return map.reduce((r1, channelDef) => { return f.call(thisArg, r1, channelDef, channel); }, r); } else { return f.call(thisArg, r, map, channel); @@ -5592,11 +5530,11 @@ case LONGITUDE: case LATITUDE2: case LONGITUDE2: // TODO: case 'cursor': // text, shape, shouldn't be a 
part of line/trail/area [falls through] - case TEXT: + case TEXT$1: case SHAPE: case ANGLE: // falls through // tooltip fields should not be added to group by [falls through] case TOOLTIP: @@ -5613,12 +5551,12 @@ case DETAIL: case KEY: { const channelDef = encoding[channel]; - if (isArray(channelDef) || isFieldDef(channelDef)) { - for (const fieldDef of array(channelDef)) { + if (vegaUtil.isArray(channelDef) || isFieldDef(channelDef)) { + for (const fieldDef of vegaUtil.array(channelDef)) { if (!fieldDef.aggregate) { details.push(vgField(fieldDef, {})); } } } @@ -5671,11 +5609,11 @@ } let customTooltipWithAggregatedField; let customTooltipWithoutAggregatedField; - if (isArray(tooltip)) { + if (vegaUtil.isArray(tooltip)) { for (const t of tooltip) { if (t.aggregate) { if (!customTooltipWithAggregatedField) { customTooltipWithAggregatedField = []; } @@ -5699,11 +5637,11 @@ } else { customTooltipWithoutAggregatedField = tooltip; } } - if (isArray(customTooltipWithoutAggregatedField) && customTooltipWithoutAggregatedField.length === 1) { + if (vegaUtil.isArray(customTooltipWithoutAggregatedField) && customTooltipWithoutAggregatedField.length === 1) { customTooltipWithoutAggregatedField = customTooltipWithoutAggregatedField[0]; } return { customTooltipWithoutAggregatedField, @@ -5719,16 +5657,16 @@ const fiveSummaryTooltip = tooltipSummary.map(({ fieldPrefix, titlePrefix }) => { - const mainTitle = withFieldName ? " of ".concat(getTitle(continuousAxisChannelDef)) : ''; + const mainTitle = withFieldName ? ` of ${getTitle(continuousAxisChannelDef)}` : ''; return { field: fieldPrefix + continuousAxisChannelDef.field, type: continuousAxisChannelDef.type, title: isSignalRef(titlePrefix) ? { - signal: titlePrefix + "\"".concat(escape(mainTitle), "\"") + signal: `${titlePrefix}"${escape(mainTitle)}"` } : titlePrefix + mainTitle }; }); const tooltipFieldDefs = fieldDefs(encodingWithoutContinuousAxis).map(toStringFieldDef); return { @@ -5759,11 +5697,11 @@ return partLayerMixins(compositeMarkDef, partName, compositeMarkConfig, { mark, // TODO better remove this method and just have mark as a parameter of the method encoding: { [continuousAxis]: { - field: positionPrefix + '_' + continuousAxisChannelDef.field, + field: `${positionPrefix}_${continuousAxisChannelDef.field}`, type: continuousAxisChannelDef.type, ...(title !== undefined ? { title } : {}), ...(scale !== undefined ? { @@ -5771,13 +5709,13 @@ } : {}), ...(axis !== undefined ? { axis } : {}) }, - ...(isString(endPositionPrefix) ? { - [continuousAxis + '2']: { - field: endPositionPrefix + '_' + continuousAxisChannelDef.field + ...(vegaUtil.isString(endPositionPrefix) ? { + [`${continuousAxis}2`]: { + field: `${endPositionPrefix}_${continuousAxisChannelDef.field}` } } : {}), ...sharedEncoding, ...extraEncoding } @@ -5805,12 +5743,12 @@ opacity } : {}), ...(isMarkDef(partBaseSpec.mark) ? partBaseSpec.mark : { type: partBaseSpec.mark }), - style: "".concat(mark, "-").concat(part), - ...(isBoolean(markDef[part]) ? {} : markDef[part]) + style: `${mark}-${part}`, + ...(vegaUtil.isBoolean(markDef[part]) ? {} : markDef[part]) } }]; } return []; @@ -5820,24 +5758,24 @@ encoding } = spec; const continuousAxis = orient === 'vertical' ? 'y' : 'x'; const continuousAxisChannelDef = encoding[continuousAxis]; // Safe to cast because if x is not continuous fielddef, the orient would not be horizontal. 
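    /*
     * Editorial note (illustrative sketch, not part of the bundled source): for a
     * vertical composite mark such as
     *   {mark: 'errorbar', encoding: {x: {field: 'group', type: 'nominal'},
     *                                 y: {field: 'value', type: 'quantitative'}}}
     * orient is 'vertical', so continuousAxis is 'y' and the lookups below read any
     * explicit y2 / yError / yError2 channel definitions from the encoding.
     * Field names here are made up for illustration.
     */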
- const continuousAxisChannelDef2 = encoding[continuousAxis + '2']; - const continuousAxisChannelDefError = encoding[continuousAxis + 'Error']; - const continuousAxisChannelDefError2 = encoding[continuousAxis + 'Error2']; + const continuousAxisChannelDef2 = encoding[`${continuousAxis}2`]; + const continuousAxisChannelDefError = encoding[`${continuousAxis}Error`]; + const continuousAxisChannelDefError2 = encoding[`${continuousAxis}Error2`]; return { continuousAxisChannelDef: filterAggregateFromChannelDef(continuousAxisChannelDef, compositeMark), continuousAxisChannelDef2: filterAggregateFromChannelDef(continuousAxisChannelDef2, compositeMark), continuousAxisChannelDefError: filterAggregateFromChannelDef(continuousAxisChannelDefError, compositeMark), continuousAxisChannelDefError2: filterAggregateFromChannelDef(continuousAxisChannelDefError2, compositeMark), continuousAxis }; } function filterAggregateFromChannelDef(continuousAxisChannelDef, compositeMark) { - if (continuousAxisChannelDef && continuousAxisChannelDef.aggregate) { + if (continuousAxisChannelDef !== null && continuousAxisChannelDef !== void 0 && continuousAxisChannelDef.aggregate) { const { aggregate, ...continuousAxisWithoutAggregate } = continuousAxisChannelDef; @@ -5893,50 +5831,48 @@ } else if (isContinuousFieldOrDatumDef(y)) { // y is continuous but x is not return 'vertical'; } else { // Neither x nor y is continuous. - throw new Error("Need a valid continuous axis for ".concat(compositeMark, "s")); + throw new Error(`Need a valid continuous axis for ${compositeMark}s`); } } const BOXPLOT = 'boxplot'; const BOXPLOT_PARTS = ['box', 'median', 'outliers', 'rule', 'ticks']; const boxPlotNormalizer = new CompositeMarkNormalizer(BOXPLOT, normalizeBoxPlot); function getBoxPlotType(extent) { - if (isNumber(extent)) { + if (vegaUtil.isNumber(extent)) { return 'tukey'; } // Ham: If we ever want to, we could add another extent syntax `{kIQR: number}` for the original [Q1-k*IQR, Q3+k*IQR] whisker and call this boxPlotType = `kIQR`. However, I'm not exposing this for now. return extent; } function normalizeBoxPlot(spec, { config }) { - var _markDef$extent; - // Need to initEncoding first so we can infer type spec = { ...spec, encoding: normalizeEncoding(spec.encoding, config) }; const { mark, encoding: _encoding, - selection, + params, projection: _p, ...outerSpec } = spec; const markDef = isMarkDef(mark) ? mark : { type: mark }; // TODO(https://github.com/vega/vega-lite/issues/3702): add selection support - if (selection) { + if (params) { warn(selectionNotSupported('boxplot')); } - const extent = (_markDef$extent = markDef.extent) !== null && _markDef$extent !== void 0 ? _markDef$extent : config.boxplot.extent; + const extent = markDef.extent ?? config.boxplot.extent; const sizeValue = getMarkPropOrConfig('size', markDef, // TODO: https://github.com/vega/vega-lite/issues/6245 config); const boxPlotType = getBoxPlotType(extent); const { bins, @@ -6052,11 +5988,11 @@ }), ...makeBoxPlotMidTick({ partName: 'median', mark: { type: 'tick', invalid: null, - ...(isObject(config.boxplot.median) && config.boxplot.median.color ? { + ...(vegaUtil.isObject(config.boxplot.median) && config.boxplot.median.color ? { color: config.boxplot.median.color } : {}), ...(sizeValue ? 
{ size: sizeValue } : {}), @@ -6066,50 +6002,48 @@ positionPrefix: 'mid_box', extraEncoding: fiveSummaryTooltipEncoding })]; if (boxPlotType === 'min-max') { - var _outerSpec$transform; - return { ...outerSpec, - transform: ((_outerSpec$transform = outerSpec.transform) !== null && _outerSpec$transform !== void 0 ? _outerSpec$transform : []).concat(transform), + transform: (outerSpec.transform ?? []).concat(transform), layer: boxLayers }; } // Tukey Box Plot - const lowerBoxExpr = "datum[\"lower_box_".concat(continuousAxisChannelDef.field, "\"]"); - const upperBoxExpr = "datum[\"upper_box_".concat(continuousAxisChannelDef.field, "\"]"); - const iqrExpr = "(".concat(upperBoxExpr, " - ").concat(lowerBoxExpr, ")"); - const lowerWhiskerExpr = "".concat(lowerBoxExpr, " - ").concat(extent, " * ").concat(iqrExpr); - const upperWhiskerExpr = "".concat(upperBoxExpr, " + ").concat(extent, " * ").concat(iqrExpr); - const fieldExpr = "datum[\"".concat(continuousAxisChannelDef.field, "\"]"); + const lowerBoxExpr = `datum["lower_box_${continuousAxisChannelDef.field}"]`; + const upperBoxExpr = `datum["upper_box_${continuousAxisChannelDef.field}"]`; + const iqrExpr = `(${upperBoxExpr} - ${lowerBoxExpr})`; + const lowerWhiskerExpr = `${lowerBoxExpr} - ${extent} * ${iqrExpr}`; + const upperWhiskerExpr = `${upperBoxExpr} + ${extent} * ${iqrExpr}`; + const fieldExpr = `datum["${continuousAxisChannelDef.field}"]`; const joinaggregateTransform = { joinaggregate: boxParamsQuartiles(continuousAxisChannelDef.field), groupby }; const filteredWhiskerSpec = { transform: [{ - filter: "(".concat(lowerWhiskerExpr, " <= ").concat(fieldExpr, ") && (").concat(fieldExpr, " <= ").concat(upperWhiskerExpr, ")") + filter: `(${lowerWhiskerExpr} <= ${fieldExpr}) && (${fieldExpr} <= ${upperWhiskerExpr})` }, { aggregate: [{ op: 'min', field: continuousAxisChannelDef.field, - as: 'lower_whisker_' + continuousAxisChannelDef.field + as: `lower_whisker_${continuousAxisChannelDef.field}` }, { op: 'max', field: continuousAxisChannelDef.field, - as: 'upper_whisker_' + continuousAxisChannelDef.field + as: `upper_whisker_${continuousAxisChannelDef.field}` }, // preserve lower_box / upper_box { op: 'min', - field: 'lower_box_' + continuousAxisChannelDef.field, - as: 'lower_box_' + continuousAxisChannelDef.field + field: `lower_box_${continuousAxisChannelDef.field}`, + as: `lower_box_${continuousAxisChannelDef.field}` }, { op: 'max', - field: 'upper_box_' + continuousAxisChannelDef.field, - as: 'upper_box_' + continuousAxisChannelDef.field + field: `upper_box_${continuousAxisChannelDef.field}`, + as: `upper_box_${continuousAxisChannelDef.field}` }, ...aggregate], groupby }], layer: whiskerLayers }; @@ -6123,11 +6057,11 @@ } = continuousAxisChannelDef; const title = getTitle(continuousAxisChannelDef); const axisWithoutTitle = omit(axis, ['title']); const outlierLayersMixins = partLayerMixins(markDef, 'outliers', config.boxplot, { transform: [{ - filter: "(".concat(fieldExpr, " < ").concat(lowerWhiskerExpr, ") || (").concat(fieldExpr, " > ").concat(upperWhiskerExpr, ")") + filter: `(${fieldExpr} < ${lowerWhiskerExpr}) || (${fieldExpr} > ${upperWhiskerExpr})` }], mark: 'point', encoding: { [continuousAxis]: { field: continuousAxisChannelDef.field, @@ -6176,15 +6110,15 @@ function boxParamsQuartiles(continousAxisField) { return [{ op: 'q1', field: continousAxisField, - as: 'lower_box_' + continousAxisField + as: `lower_box_${continousAxisField}` }, { op: 'q3', field: continousAxisField, - as: 'upper_box_' + continousAxisField + as: 
`upper_box_${continousAxisField}` }]; } function boxParams(spec, extent, config) { const orient = compositeMarkOrient(spec, BOXPLOT); @@ -6195,11 +6129,11 @@ const continuousFieldName = continuousAxisChannelDef.field; const boxPlotType = getBoxPlotType(extent); const boxplotSpecificAggregate = [...boxParamsQuartiles(continuousFieldName), { op: 'median', field: continuousFieldName, - as: 'mid_box_' + continuousFieldName + as: `mid_box_${continuousFieldName}` }, { op: 'min', field: continuousFieldName, as: (boxPlotType === 'min-max' ? 'lower_whisker_' : 'min_') + continuousFieldName }, { @@ -6207,18 +6141,18 @@ field: continuousFieldName, as: (boxPlotType === 'min-max' ? 'upper_whisker_' : 'max_') + continuousFieldName }]; const postAggregateCalculates = boxPlotType === 'min-max' || boxPlotType === 'tukey' ? [] : [// This is for the original k-IQR, which we do not expose { - calculate: "datum[\"upper_box_".concat(continuousFieldName, "\"] - datum[\"lower_box_").concat(continuousFieldName, "\"]"), - as: 'iqr_' + continuousFieldName + calculate: `datum["upper_box_${continuousFieldName}"] - datum["lower_box_${continuousFieldName}"]`, + as: `iqr_${continuousFieldName}` }, { - calculate: "min(datum[\"upper_box_".concat(continuousFieldName, "\"] + datum[\"iqr_").concat(continuousFieldName, "\"] * ").concat(extent, ", datum[\"max_").concat(continuousFieldName, "\"])"), - as: 'upper_whisker_' + continuousFieldName + calculate: `min(datum["upper_box_${continuousFieldName}"] + datum["iqr_${continuousFieldName}"] * ${extent}, datum["max_${continuousFieldName}"])`, + as: `upper_whisker_${continuousFieldName}` }, { - calculate: "max(datum[\"lower_box_".concat(continuousFieldName, "\"] - datum[\"iqr_").concat(continuousFieldName, "\"] * ").concat(extent, ", datum[\"min_").concat(continuousFieldName, "\"])"), - as: 'lower_whisker_' + continuousFieldName + calculate: `max(datum["lower_box_${continuousFieldName}"] - datum["iqr_${continuousFieldName}"] * ${extent}, datum["min_${continuousFieldName}"])`, + as: `lower_whisker_${continuousFieldName}` }]; const { [continuousAxis]: oldContinuousAxisChannelDef, ...oldEncodingWithoutContinuousAxis } = spec.encoding; @@ -6339,29 +6273,29 @@ const y = encoding.y; if (isTypeAggregatedUpperLower) { // type is aggregated-upper-lower if (isTypeAggregatedError) { - throw new Error("".concat(compositeMark, " cannot be both type aggregated-upper-lower and aggregated-error")); + throw new Error(`${compositeMark} cannot be both type aggregated-upper-lower and aggregated-error`); } const x2 = encoding.x2; const y2 = encoding.y2; if (isFieldOrDatumDef(x2) && isFieldOrDatumDef(y2)) { // having both x, x2 and y, y2 - throw new Error("".concat(compositeMark, " cannot have both x2 and y2")); + throw new Error(`${compositeMark} cannot have both x2 and y2`); } else if (isFieldOrDatumDef(x2)) { if (isContinuousFieldOrDatumDef(x)) { // having x, x2 quantitative and field y, y2 are not specified return { orient: 'horizontal', inputType: 'aggregated-upper-lower' }; } else { // having x, x2 that are not both quantitative - throw new Error("Both x and x2 have to be quantitative in ".concat(compositeMark)); + throw new Error(`Both x and x2 have to be quantitative in ${compositeMark}`); } } else if (isFieldOrDatumDef(y2)) { // y2 is a FieldDef if (isContinuousFieldOrDatumDef(y)) { // having y, y2 quantitative and field x, x2 are not specified @@ -6369,11 +6303,11 @@ orient: 'vertical', inputType: 'aggregated-upper-lower' }; } else { // having y, y2 that are not both quantitative - throw new 
Error("Both y and y2 have to be quantitative in ".concat(compositeMark)); + throw new Error(`Both y and y2 have to be quantitative in ${compositeMark}`); } } throw new Error('No ranged axis'); } else { @@ -6383,21 +6317,21 @@ const yError = encoding.yError; const yError2 = encoding.yError2; if (isFieldOrDatumDef(xError2) && !isFieldOrDatumDef(xError)) { // having xError2 without xError - throw new Error("".concat(compositeMark, " cannot have xError2 without xError")); + throw new Error(`${compositeMark} cannot have xError2 without xError`); } if (isFieldOrDatumDef(yError2) && !isFieldOrDatumDef(yError)) { // having yError2 without yError - throw new Error("".concat(compositeMark, " cannot have yError2 without yError")); + throw new Error(`${compositeMark} cannot have yError2 without yError`); } if (isFieldOrDatumDef(xError) && isFieldOrDatumDef(yError)) { // having both xError and yError - throw new Error("".concat(compositeMark, " cannot have both xError and yError with both are quantiative")); + throw new Error(`${compositeMark} cannot have both xError and yError with both are quantiative`); } else if (isFieldOrDatumDef(xError)) { if (isContinuousFieldOrDatumDef(x)) { // having x and xError that are all quantitative return { orient: 'horizontal', @@ -6435,25 +6369,23 @@ function errorBarIsInputTypeAggregatedError(encoding) { return isFieldOrDatumDef(encoding.xError) || isFieldOrDatumDef(encoding.xError2) || isFieldOrDatumDef(encoding.yError) || isFieldOrDatumDef(encoding.yError2); } function errorBarParams(spec, compositeMark, config) { - var _outerSpec$transform; - // TODO: use selection const { mark, encoding, - selection, + params, projection: _p, ...outerSpec } = spec; const markDef = isMarkDef(mark) ? mark : { type: mark }; // TODO(https://github.com/vega/vega-lite/issues/3702): add selection support - if (selection) { + if (params) { warn(selectionNotSupported(compositeMark)); } const { orient, @@ -6488,11 +6420,11 @@ } = extractTransformsFromEncoding(oldEncodingWithoutContinuousAxis, config); const aggregate = [...oldAggregate, ...errorBarSpecificAggregate]; const groupby = inputType !== 'raw' ? [] : oldGroupBy; const tooltipEncoding = getCompositeMarkTooltip(tooltipSummary, continuousAxisChannelDef, encodingWithoutContinuousAxis, tooltipTitleWithFieldName); return { - transform: [...((_outerSpec$transform = outerSpec.transform) !== null && _outerSpec$transform !== void 0 ? _outerSpec$transform : []), ...bins, ...timeUnits, ...(aggregate.length === 0 ? [] : [{ + transform: [...(outerSpec.transform ?? []), ...bins, ...timeUnits, ...(aggregate.length === 0 ? 
[] : [{ aggregate, groupby }]), ...postAggregateCalculates], groupby, continuousAxisChannelDef, @@ -6522,22 +6454,22 @@ if (extent === 'stderr' || extent === 'stdev') { errorBarSpecificAggregate = [{ op: extent, field: continuousFieldName, - as: 'extent_' + continuousFieldName + as: `extent_${continuousFieldName}` }, { op: center, field: continuousFieldName, - as: 'center_' + continuousFieldName + as: `center_${continuousFieldName}` }]; postAggregateCalculates = [{ - calculate: "datum[\"center_".concat(continuousFieldName, "\"] + datum[\"extent_").concat(continuousFieldName, "\"]"), - as: 'upper_' + continuousFieldName + calculate: `datum["center_${continuousFieldName}"] + datum["extent_${continuousFieldName}"]`, + as: `upper_${continuousFieldName}` }, { - calculate: "datum[\"center_".concat(continuousFieldName, "\"] - datum[\"extent_").concat(continuousFieldName, "\"]"), - as: 'lower_' + continuousFieldName + calculate: `datum["center_${continuousFieldName}"] - datum["extent_${continuousFieldName}"]`, + as: `lower_${continuousFieldName}` }]; tooltipSummary = [{ fieldPrefix: 'center_', titlePrefix: titleCase(center) }, { @@ -6564,19 +6496,19 @@ } errorBarSpecificAggregate = [{ op: lowerExtentOp, field: continuousFieldName, - as: 'lower_' + continuousFieldName + as: `lower_${continuousFieldName}` }, { op: upperExtentOp, field: continuousFieldName, - as: 'upper_' + continuousFieldName + as: `upper_${continuousFieldName}` }, { op: centerOp, field: continuousFieldName, - as: 'center_' + continuousFieldName + as: `center_${continuousFieldName}` }]; tooltipSummary = [{ fieldPrefix: 'upper_', titlePrefix: title({ field: continuousFieldName, @@ -6611,35 +6543,35 @@ } if (inputType === 'aggregated-upper-lower') { tooltipSummary = []; postAggregateCalculates = [{ - calculate: "datum[\"".concat(continuousAxisChannelDef2.field, "\"]"), - as: 'upper_' + continuousFieldName + calculate: `datum["${continuousAxisChannelDef2.field}"]`, + as: `upper_${continuousFieldName}` }, { - calculate: "datum[\"".concat(continuousFieldName, "\"]"), - as: 'lower_' + continuousFieldName + calculate: `datum["${continuousFieldName}"]`, + as: `lower_${continuousFieldName}` }]; } else if (inputType === 'aggregated-error') { tooltipSummary = [{ fieldPrefix: '', titlePrefix: continuousFieldName }]; postAggregateCalculates = [{ - calculate: "datum[\"".concat(continuousFieldName, "\"] + datum[\"").concat(continuousAxisChannelDefError.field, "\"]"), - as: 'upper_' + continuousFieldName + calculate: `datum["${continuousFieldName}"] + datum["${continuousAxisChannelDefError.field}"]`, + as: `upper_${continuousFieldName}` }]; if (continuousAxisChannelDefError2) { postAggregateCalculates.push({ - calculate: "datum[\"".concat(continuousFieldName, "\"] + datum[\"").concat(continuousAxisChannelDefError2.field, "\"]"), - as: 'lower_' + continuousFieldName + calculate: `datum["${continuousFieldName}"] + datum["${continuousAxisChannelDefError2.field}"]`, + as: `lower_${continuousFieldName}` }); } else { postAggregateCalculates.push({ - calculate: "datum[\"".concat(continuousFieldName, "\"] - datum[\"").concat(continuousAxisChannelDefError.field, "\"]"), - as: 'lower_' + continuousFieldName + calculate: `datum["${continuousFieldName}"] - datum["${continuousAxisChannelDefError.field}"]`, + as: `lower_${continuousFieldName}` }); } } for (const postAggregateCalculate of postAggregateCalculates) { @@ -6657,11 +6589,11 @@ tooltipTitleWithFieldName }; } function getTitlePrefix(center, extent, operation) { - return titleCase(center) + ' ' + 
operation + ' ' + extent; + return `${titleCase(center)} ${operation} ${extent}`; } const ERRORBAND = 'errorband'; const ERRORBAND_PARTS = ['band', 'borders']; const errorBandNormalizer = new CompositeMarkNormalizer(ERRORBAND, normalizeErrorBand); @@ -6870,14 +6802,50 @@ type: 1, values: 1, zindex: 1 }; + const SELECTION_ID = '_vgsid_'; + const defaultConfig$1 = { + point: { + on: 'click', + fields: [SELECTION_ID], + toggle: 'event.shiftKey', + resolve: 'global', + clear: 'dblclick' + }, + interval: { + on: '[mousedown, window:mouseup] > window:mousemove!', + encodings: ['x', 'y'], + translate: '[mousedown, window:mouseup] > window:mousemove!', + zoom: 'wheel!', + mark: { + fill: '#333', + fillOpacity: 0.125, + stroke: 'white' + }, + resolve: 'global', + clear: 'dblclick' + } + }; + function isLegendBinding(bind) { + return !!bind && (bind === 'legend' || !!bind.legend); + } + function isLegendStreamBinding(bind) { + return isLegendBinding(bind) && vegaUtil.isObject(bind); + } + function isSelectionParameter(param) { + return !!param['select']; + } + function assembleParameterSignals(params) { const signals = []; for (const param of params || []) { + // Selection parameters are handled separately via assembleSelectionTopLevelSignals + // and assembleSignals methods registered on the Model. + if (isSelectionParameter(param)) continue; const { expr, bind, ...rest } = param; @@ -6903,48 +6871,10 @@ } return signals; } - const SELECTION_ID = '_vgsid_'; - const defaultConfig = { - single: { - on: 'click', - fields: [SELECTION_ID], - resolve: 'global', - empty: 'all', - clear: 'dblclick' - }, - multi: { - on: 'click', - fields: [SELECTION_ID], - toggle: 'event.shiftKey', - resolve: 'global', - empty: 'all', - clear: 'dblclick' - }, - interval: { - on: '[mousedown, window:mouseup] > window:mousemove!', - encodings: ['x', 'y'], - translate: '[mousedown, window:mouseup] > window:mousemove!', - zoom: 'wheel!', - mark: { - fill: '#333', - fillOpacity: 0.125, - stroke: 'white' - }, - resolve: 'global', - clear: 'dblclick' - } - }; - function isLegendBinding(bind) { - return !!bind && (bind === 'legend' || !!bind.legend); - } - function isLegendStreamBinding(bind) { - return isLegendBinding(bind) && isObject(bind); - } - /** * Base layout mixins for V/HConcatSpec, which should not have RowCol<T> generic fo its property. */ /** @@ -6975,11 +6905,11 @@ function isFitType(autoSizeType) { return autoSizeType === 'fit' || autoSizeType === 'fit-x' || autoSizeType === 'fit-y'; } function getFitType(sizeType) { - return sizeType ? "fit-".concat(getPositionScaleChannel(sizeType)) : 'fit'; + return sizeType ? `fit-${getPositionScaleChannel(sizeType)}` : 'fit'; } const TOP_LEVEL_PROPERTIES = ['background', 'padding' // We do not include "autosize" here as it is supported by only unit and layer specs and thus need to be normalized ]; function extractTopLevelProperties(t, includeParams) { const o = {}; @@ -7000,11 +6930,11 @@ /** * Common properties for all types of specification */ function isStep(size) { - return isObject(size) && size['step'] !== undefined; + return vegaUtil.isObject(size) && size['step'] !== undefined; } // TODO(https://github.com/vega/vega-lite/issues/2503): Make this generic so we can support some form of top-down sizing. /** * Common properties for specifying width and height of unit and layer specifications. 
*/ @@ -7046,16 +6976,14 @@ for (const prop of COMPOSITION_LAYOUT_PROPERTIES) { if (spec[prop] !== undefined) { if (prop === 'spacing') { - var _spacing$row, _spacing$column; - const spacing = spec[prop]; - layout[prop] = isNumber(spacing) ? spacing : { - row: (_spacing$row = spacing.row) !== null && _spacing$row !== void 0 ? _spacing$row : spacingConfig, - column: (_spacing$column = spacing.column) !== null && _spacing$column !== void 0 ? _spacing$column : spacingConfig + layout[prop] = vegaUtil.isNumber(spacing) ? spacing : { + row: spacing.row ?? spacingConfig, + column: spacing.column ?? spacingConfig }; } else { layout[prop] = spec[prop]; } } @@ -7063,34 +6991,30 @@ return layout; } function getViewConfigContinuousSize(viewConfig, channel) { - var _viewConfig$channel; - - return (_viewConfig$channel = viewConfig[channel]) !== null && _viewConfig$channel !== void 0 ? _viewConfig$channel : viewConfig[channel === 'width' ? 'continuousWidth' : 'continuousHeight']; // get width/height for backwards compatibility + return viewConfig[channel] ?? viewConfig[channel === 'width' ? 'continuousWidth' : 'continuousHeight']; // get width/height for backwards compatibility } function getViewConfigDiscreteStep(viewConfig, channel) { const size = getViewConfigDiscreteSize(viewConfig, channel); return isStep(size) ? size.step : DEFAULT_STEP; } function getViewConfigDiscreteSize(viewConfig, channel) { - var _viewConfig$channel2; + const size = viewConfig[channel] ?? viewConfig[channel === 'width' ? 'discreteWidth' : 'discreteHeight']; // get width/height for backwards compatibility - const size = (_viewConfig$channel2 = viewConfig[channel]) !== null && _viewConfig$channel2 !== void 0 ? _viewConfig$channel2 : viewConfig[channel === 'width' ? 'discreteWidth' : 'discreteHeight']; // get width/height for backwards compatibility - return getFirstDefined(size, { step: viewConfig.step }); } const DEFAULT_STEP = 20; const defaultViewConfig = { continuousWidth: 200, continuousHeight: 200, step: DEFAULT_STEP }; - const defaultConfig$1 = { + const defaultConfig = { background: 'white', padding: 5, timeFormat: '%b %d, %Y', countTitle: 'Count of Records', view: defaultViewConfig, @@ -7145,11 +7069,11 @@ labelPadding: 10 }, headerColumn: {}, headerRow: {}, headerFacet: {}, - selection: defaultConfig, + selection: defaultConfig$1, style: {}, title: {}, facet: { spacing: DEFAULT_SPACING }, @@ -7195,11 +7119,11 @@ }; function colorSignalConfig(color = {}) { return { signals: [{ name: 'color', - value: isObject(color) ? { ...DEFAULT_COLOR, + value: vegaUtil.isObject(color) ? { ...DEFAULT_COLOR, ...color } : DEFAULT_COLOR }], mark: { color: { @@ -7281,11 +7205,11 @@ } function fontSizeSignalConfig(fontSize) { return { signals: [{ name: 'fontSize', - value: isObject(fontSize) ? { ...DEFAULT_FONT_SIZE, + value: vegaUtil.isObject(fontSize) ? { ...DEFAULT_FONT_SIZE, ...fontSize } : DEFAULT_FONT_SIZE }], text: { fontSize: { @@ -7371,24 +7295,31 @@ function initConfig(specifiedConfig = {}) { const { color, font, fontSize, + selection, ...restConfig } = specifiedConfig; - const mergedConfig = mergeConfig({}, defaultConfig$1, font ? fontConfig(font) : {}, color ? colorSignalConfig(color) : {}, fontSize ? fontSizeSignalConfig(fontSize) : {}, restConfig || {}); + const mergedConfig = vegaUtil.mergeConfig({}, duplicate(defaultConfig), font ? fontConfig(font) : {}, color ? colorSignalConfig(color) : {}, fontSize ? fontSizeSignalConfig(fontSize) : {}, restConfig || {}); // mergeConfig doesn't recurse and overrides object values. 
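    /*
     * Editorial note (illustrative sketch, not part of the bundled source): because
     * the selection block is applied with vega.writeConfig below (recursively)
     * rather than spread through mergeConfig, a partial user config such as
     *   initConfig({selection: {interval: {mark: {stroke: 'firebrick'}}}})
     * should keep the default interval fill/fillOpacity and only override stroke,
     * instead of replacing the whole interval object.
     */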
+ + if (selection) { + vega.writeConfig(mergedConfig, 'selection', selection, true); + } + const outputConfig = omit(mergedConfig, configPropsWithExpr); for (const prop of ['background', 'lineBreak', 'padding']) { if (mergedConfig[prop]) { outputConfig[prop] = signalRefOrValue(mergedConfig[prop]); } } for (const markConfigType of MARK_CONFIGS) { if (mergedConfig[markConfigType]) { - outputConfig[markConfigType] = replaceExprRefInIndex(mergedConfig[markConfigType]); + // FIXME: outputConfig[markConfigType] expects that types are replaced recursively but replaceExprRef only replaces one level deep + outputConfig[markConfigType] = replaceExprRef(mergedConfig[markConfigType]); } } for (const axisConfigType of AXIS_CONFIGS) { if (mergedConfig[axisConfigType]) { @@ -7396,32 +7327,32 @@ } } for (const headerConfigType of HEADER_CONFIGS) { if (mergedConfig[headerConfigType]) { - outputConfig[headerConfigType] = replaceExprRefInIndex(mergedConfig[headerConfigType]); + outputConfig[headerConfigType] = replaceExprRef(mergedConfig[headerConfigType]); } } if (mergedConfig.legend) { - outputConfig.legend = replaceExprRefInIndex(mergedConfig.legend); + outputConfig.legend = replaceExprRef(mergedConfig.legend); } if (mergedConfig.scale) { - outputConfig.scale = replaceExprRefInIndex(mergedConfig.scale); + outputConfig.scale = replaceExprRef(mergedConfig.scale); } if (mergedConfig.style) { outputConfig.style = getStyleConfigInternal(mergedConfig.style); } if (mergedConfig.title) { - outputConfig.title = replaceExprRefInIndex(mergedConfig.title); + outputConfig.title = replaceExprRef(mergedConfig.title); } if (mergedConfig.view) { - outputConfig.view = replaceExprRefInIndex(mergedConfig.view); + outputConfig.view = replaceExprRef(mergedConfig.view); } return outputConfig; } const MARK_STYLES = ['view', ...PRIMITIVE_MARKS]; @@ -7458,11 +7389,11 @@ if (config.mark) { for (const prop of VL_ONLY_MARK_CONFIG_PROPERTIES) { delete config.mark[prop]; } - if (config.mark.tooltip && isObject(config.mark.tooltip)) { + if (config.mark.tooltip && vegaUtil.isObject(config.mark.tooltip)) { delete config.mark.tooltip; } } if (config.params) { @@ -7497,11 +7428,11 @@ } redirectTitleConfig(config); // Remove empty config objects. for (const prop in config) { - if (isObject(config[prop]) && isEmpty(config[prop])) { + if (vegaUtil.isObject(config[prop]) && isEmpty(config[prop])) { delete config[prop]; } } return isEmpty(config) ? undefined : config; @@ -7542,26 +7473,22 @@ } } function redirectConfigToStyleConfig(config, prop, // string = composite mark toProp, compositeMarkPart) { - var _toProp; - const propConfig = compositeMarkPart ? config[prop][compositeMarkPart] : config[prop]; if (prop === 'view') { toProp = 'cell'; // View's default style is "cell" } const style = { ...propConfig, - ...config.style[(_toProp = toProp) !== null && _toProp !== void 0 ? _toProp : prop] + ...config.style[toProp ?? prop] }; // set config.style if it is not an empty object if (!isEmpty(style)) { - var _toProp2; - - config.style[(_toProp2 = toProp) !== null && _toProp2 !== void 0 ? _toProp2 : prop] = style; + config.style[toProp ?? prop] = style; } if (!compositeMarkPart) { // For composite mark, so don't delete the whole config yet as we have to do multiple redirections. 
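    /*
     * Editorial note (illustrative, not part of the bundled source): this function
     * redirects a Vega-Lite-only config block into Vega's style config. For
     * prop = 'view' the values land under config.style.cell (the default style of a
     * unit view's data rectangle), per the 'cell' branch above; for composite marks
     * each part is redirected separately, which is why the enclosing config object
     * is not deleted yet.
     */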
delete config[prop]; @@ -7585,11 +7512,11 @@ function isRepeatSpec(spec) { return 'repeat' in spec; } function isLayerRepeatSpec(spec) { - return !isArray(spec.repeat) && spec.repeat['layer']; + return !vegaUtil.isArray(spec.repeat) && spec.repeat['layer']; } class SpecMapper { map(spec, params) { if (isFacetSpec(spec)) { @@ -7668,20 +7595,24 @@ normalize: 1 }; function isStackOffset(s) { return s in STACK_OFFSET_INDEX; } - const STACKABLE_MARKS = new Set([ARC, BAR, AREA, RULE, POINT, CIRCLE, SQUARE, LINE, TEXT$1, TICK]); + const STACKABLE_MARKS = new Set([ARC, BAR, AREA, RULE, POINT, CIRCLE, SQUARE, LINE, TEXT, TICK]); const STACK_BY_DEFAULT_MARKS = new Set([BAR, AREA, ARC]); + function isUnbinnedQuantitative(channelDef) { + return isFieldDef(channelDef) && channelDefType(channelDef) === 'quantitative' && !channelDef.bin; + } + function potentialStackedChannel(encoding, x) { const y = x === 'x' ? 'y' : 'radius'; const xDef = encoding[x]; const yDef = encoding[y]; if (isFieldDef(xDef) && isFieldDef(yDef)) { - if (channelDefType(xDef) === 'quantitative' && channelDefType(yDef) === 'quantitative') { + if (isUnbinnedQuantitative(xDef) && isUnbinnedQuantitative(yDef)) { if (xDef.stack) { return x; } else if (yDef.stack) { return y; } @@ -7701,18 +7632,18 @@ return y; } else if (yScale && yScale !== 'linear') { return x; } } - } else if (channelDefType(xDef) === 'quantitative') { + } else if (isUnbinnedQuantitative(xDef)) { return x; - } else if (channelDefType(yDef) === 'quantitative') { + } else if (isUnbinnedQuantitative(yDef)) { return y; } - } else if (channelDefType(xDef) === 'quantitative') { + } else if (isUnbinnedQuantitative(xDef)) { return x; - } else if (channelDefType(yDef) === 'quantitative') { + } else if (isUnbinnedQuantitative(yDef)) { return y; } return undefined; } @@ -7734,10 +7665,12 @@ } // Note: CompassQL uses this method and only pass in required properties of each argument object. // If required properties change, make sure to update CompassQL. function stack(m, encoding, opt = {}) { + var _stackedFieldDef$scal, _stackedFieldDef$scal2; + const mark = isMarkDef(m) ? m.type : m; // Should have stackable mark if (!STACKABLE_MARKS.has(mark)) { return null; } // Run potential stacked twice, one for Cartesian and another for Polar, @@ -7768,11 +7701,11 @@ const stackBy = NONPOSITION_CHANNELS.reduce((sc, channel) => { // Ignore tooltip in stackBy (https://github.com/vega/vega-lite/issues/4001) if (channel !== 'tooltip' && channelHasField(encoding, channel)) { const channelDef = encoding[channel]; - for (const cDef of array(channelDef)) { + for (const cDef of vegaUtil.array(channelDef)) { const fieldDef = getFieldDef(cDef); if (fieldDef.aggregate) { continue; } // Check whether the channel's field is identical to x/y's field or if the channel is a repeat @@ -7795,30 +7728,29 @@ }, []); // Automatically determine offset let offset; if (stackedFieldDef.stack !== undefined) { - if (isBoolean(stackedFieldDef.stack)) { + if (vegaUtil.isBoolean(stackedFieldDef.stack)) { offset = stackedFieldDef.stack ? 
'zero' : null; } else { offset = stackedFieldDef.stack; } - } else if (stackBy.length > 0 && STACK_BY_DEFAULT_MARKS.has(mark)) { - // Bar and Area with sum ops are automatically stacked by default + } else if (STACK_BY_DEFAULT_MARKS.has(mark)) { offset = 'zero'; } if (!offset || !isStackOffset(offset)) { return null; } - if (isAggregate(encoding) && stackBy.length === 0) { + if (isAggregate$1(encoding) && stackBy.length === 0) { return null; } // warn when stacking non-linear - if (stackedFieldDef.scale && stackedFieldDef.scale.type && stackedFieldDef.scale.type !== ScaleType.LINEAR) { + if (stackedFieldDef !== null && stackedFieldDef !== void 0 && (_stackedFieldDef$scal = stackedFieldDef.scale) !== null && _stackedFieldDef$scal !== void 0 && _stackedFieldDef$scal.type && (stackedFieldDef === null || stackedFieldDef === void 0 ? void 0 : (_stackedFieldDef$scal2 = stackedFieldDef.scale) === null || _stackedFieldDef$scal2 === void 0 ? void 0 : _stackedFieldDef$scal2.type) !== ScaleType.LINEAR) { if (opt.disallowNonLinearStack) { return null; } else { warn(cannotStackNonLinearScale(stackedFieldDef.scale.type)); } @@ -7875,19 +7807,19 @@ return { opacity: 0 }; } else if (markDef.point) { // truthy : true or object - return isObject(markDef.point) ? markDef.point : {}; + return vegaUtil.isObject(markDef.point) ? markDef.point : {}; } else if (markDef.point !== undefined) { // false or null return null; } else { // undefined (not disabled) if (markConfig.point || encoding.shape) { // enable point overlay if config[mark].point is truthy or if encoding.shape is provided - return isObject(markConfig.point) ? markConfig.point : {}; + return vegaUtil.isObject(markConfig.point) ? markConfig.point : {}; } // markDef.point is defined as falsy return undefined; } @@ -7941,16 +7873,16 @@ } return false; } - run(spec, params, normalize) { + run(spec, normParams, normalize) { const { config - } = params; + } = normParams; const { - selection, + params, projection, mark, encoding: e, ...outerSpec } = spec; // Need to call normalizeEncoding because we need the inferred types to correctly determine stack @@ -7959,12 +7891,12 @@ const markDef = isMarkDef(mark) ? mark : { type: mark }; const pointOverlay = getPointOverlay(markDef, config[markDef.type], encoding); const lineOverlay = markDef.type === 'area' && getLineOverlay(markDef, config[markDef.type]); - const layer = [{ ...(selection ? { - selection + const layer = [{ ...(params ? { + params } : {}), mark: dropLineAndPoint({ // TODO: extract this 0.7 to be shared with default opacity for point/tick/... ...(markDef.type === 'area' && markDef.opacity === undefined && markDef.fillOpacity === undefined ? { opacity: 0.7 } : {}), @@ -8020,85 +7952,17 @@ }); } return normalize({ ...outerSpec, layer - }, { ...params, + }, { ...normParams, config: dropLineAndPointFromConfig(config) }); } } - // this is not accurate, but it's not worth making it accurate - class RangeStepNormalizer { - constructor() { - _defineProperty(this, "name", 'RangeStep'); - } - - hasMatchingType(spec) { - if (isUnitSpec(spec) && spec.encoding) { - for (const channel of POSITION_SCALE_CHANNELS) { - const def = spec.encoding[channel]; - - if (def && isFieldOrDatumDef(def)) { - var _def$scale; - - if (def === null || def === void 0 ? void 0 : (_def$scale = def.scale) === null || _def$scale === void 0 ? 
void 0 : _def$scale['rangeStep']) { - return true; - } - } - } - } - - return false; - } - - run(spec) { - const sizeMixins = {}; - let encoding = { ...spec.encoding - }; - - for (const channel of POSITION_SCALE_CHANNELS) { - const sizeType = getSizeChannel(channel); - const def = encoding[channel]; - - if (def && isFieldOrDatumDef(def)) { - var _def$scale2; - - if (def === null || def === void 0 ? void 0 : (_def$scale2 = def.scale) === null || _def$scale2 === void 0 ? void 0 : _def$scale2['rangeStep']) { - const { - scale, - ...defWithoutScale - } = def; - const { - rangeStep, - ...scaleWithoutRangeStep - } = scale; - sizeMixins[sizeType] = { - step: scale['rangeStep'] - }; - warn(RANGE_STEP_DEPRECATED); - encoding = { ...encoding, - [channel]: { ...defWithoutScale, - ...(isEmpty(scaleWithoutRangeStep) ? {} : { - scale: scaleWithoutRangeStep - }) - } - }; - } - } - } - - return { ...sizeMixins, - ...spec, - encoding - }; - } - - } - function replaceRepeaterInFacet(facet, repeater) { if (!repeater) { return facet; } @@ -8212,17 +8076,16 @@ function replaceRepeaterInMapping(mapping, repeater) { const out = {}; for (const channel in mapping) { - if (has(mapping, channel)) { + if (vegaUtil.hasOwnProperty(mapping, channel)) { const channelDef = mapping[channel]; - if (isArray(channelDef)) { + if (vegaUtil.isArray(channelDef)) { // array cannot have condition - out[channel] = channelDef. // somehow we need to cast it here - map(cd => replaceRepeaterInChannelDef(cd, repeater)).filter(cd => cd); + out[channel] = channelDef.map(cd => replaceRepeaterInChannelDef(cd, repeater)).filter(cd => cd); } else { const cd = replaceRepeaterInChannelDef(channelDef, repeater); if (cd !== undefined) { out[channel] = cd; @@ -8244,11 +8107,11 @@ const { encoding, mark } = spec; - if (mark === 'line') { + if (mark === 'line' || isMarkDef(mark) && mark.type === 'line') { for (const channel of SECONDARY_RANGE_CHANNEL) { const mainChannel = getMainRangeChannel(channel); const mainChannelDef = encoding[mainChannel]; if (encoding[channel]) { @@ -8263,25 +8126,28 @@ return false; } run(spec, params, normalize) { const { - encoding + encoding, + mark } = spec; warn(lineWithRange(!!encoding.x2, !!encoding.y2)); return normalize({ ...spec, - mark: 'rule' + mark: vegaUtil.isObject(mark) ? { ...mark, + type: 'rule' + } : 'rule' }, params); } } class CoreNormalizer extends SpecMapper { constructor(...args) { super(...args); - _defineProperty(this, "nonFacetUnitNormalizers", [boxPlotNormalizer, errorBarNormalizer, errorBandNormalizer, new PathOverlayNormalizer(), new RuleForRangedLineNormalizer(), new RangeStepNormalizer()]); + _defineProperty(this, "nonFacetUnitNormalizers", [boxPlotNormalizer, errorBarNormalizer, errorBandNormalizer, new PathOverlayNormalizer(), new RuleForRangedLineNormalizer()]); } map(spec, params) { // Special handling for a faceted unit spec as it can return a facet spec, not just a layer or unit spec like a normal unit spec. 
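    /*
     * Editorial note (illustrative sketch, not part of the bundled source): a
     * "faceted unit spec" is a single-view spec whose encoding carries row, column,
     * or facet, e.g.
     *   {mark: 'point', encoding: {row: {field: 'site'}, x: {...}, y: {...}}}
     * mapFacetedUnit (defined further below) lifts those channels out into a facet
     * operator, so normalizing such a spec can yield a facet spec rather than a
     * unit spec. The field name is made up for illustration.
     */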
if (isUnitSpec(spec)) { @@ -8369,11 +8235,11 @@ return { ...rest, layer: layer.map(layerValue => { const childRepeater = { ...repeater, layer: layerValue }; - const childName = (childSpec.name || '') + repeaterPrefix + "child__layer_".concat(varName(layerValue)); + const childName = `${(childSpec.name || '') + repeaterPrefix}child__layer_${varName(layerValue)}`; const child = this.mapLayerOrUnit(childSpec, { ...params, repeater: childRepeater, repeaterPrefix: childName }); child.name = childName; @@ -8382,44 +8248,42 @@ }; } } mapNonLayerRepeat(spec, params) { - var _childSpec$data; - const { repeat, spec: childSpec, data, ...remainingProperties } = spec; - if (!isArray(repeat) && spec.columns) { + if (!vegaUtil.isArray(repeat) && spec.columns) { // is repeat with row/column spec = omit(spec, ['columns']); warn(columnsNotSupportByRowCol('repeat')); } const concat = []; const { repeater = {}, repeaterPrefix = '' } = params; - const row = !isArray(repeat) && repeat.row || [repeater ? repeater.row : null]; - const column = !isArray(repeat) && repeat.column || [repeater ? repeater.column : null]; - const repeatValues = isArray(repeat) && repeat || [repeater ? repeater.repeat : null]; // cross product + const row = !vegaUtil.isArray(repeat) && repeat.row || [repeater ? repeater.row : null]; + const column = !vegaUtil.isArray(repeat) && repeat.column || [repeater ? repeater.column : null]; + const repeatValues = vegaUtil.isArray(repeat) && repeat || [repeater ? repeater.repeat : null]; // cross product for (const repeatValue of repeatValues) { for (const rowValue of row) { for (const columnValue of column) { const childRepeater = { repeat: repeatValue, row: rowValue, column: columnValue, layer: repeater.layer }; - const childName = (childSpec.name || '') + repeaterPrefix + 'child__' + (isArray(repeat) ? "".concat(varName(repeatValue)) : (repeat.row ? "row_".concat(varName(rowValue)) : '') + (repeat.column ? "column_".concat(varName(columnValue)) : '')); + const childName = (childSpec.name || '') + repeaterPrefix + 'child__' + (vegaUtil.isArray(repeat) ? `${varName(repeatValue)}` : (repeat.row ? `row_${varName(rowValue)}` : '') + (repeat.column ? `column_${varName(columnValue)}` : '')); const child = this.map(childSpec, { ...params, repeater: childRepeater, repeaterPrefix: childName }); child.name = childName; // we move data up @@ -8427,13 +8291,13 @@ concat.push(omit(child, ['data'])); } } } - const columns = isArray(repeat) ? spec.columns : repeat.column ? repeat.column.length : 1; + const columns = vegaUtil.isArray(repeat) ? spec.columns : repeat.column ? repeat.column.length : 1; return { - data: (_childSpec$data = childSpec.data) !== null && _childSpec$data !== void 0 ? _childSpec$data : data, + data: childSpec.data ?? 
data, // data from child spec should have precedence align: 'all', ...remainingProperties, columns, concat @@ -8482,11 +8346,11 @@ }, { config }); } - mapFacetedUnit(spec, params) { + mapFacetedUnit(spec, normParams) { // New encoding in the inside spec should not contain row / column // as row/column should be moved to facet const { row, column, @@ -8498,23 +8362,23 @@ mark, width, projection, height, view, - selection, + params, encoding: _, ...outerSpec } = spec; const { facetMapping, layout } = this.getFacetMappingAndLayout({ row, column, facet - }, params); - const newEncoding = replaceRepeaterInEncoding(encoding, params.repeater); + }, normParams); + const newEncoding = replaceRepeaterInEncoding(encoding, normParams.repeater); return this.mapFacet({ ...outerSpec, ...layout, // row / column has higher precedence than facet facet: facetMapping, spec: { ...(width ? { @@ -8529,15 +8393,15 @@ ...(projection ? { projection } : {}), mark, encoding: newEncoding, - ...(selection ? { - selection + ...(params ? { + params } : {}) } - }, params); + }, normParams); } getFacetMappingAndLayout(facets, params) { const { row, @@ -8566,13 +8430,11 @@ } = def; facetMapping[channel] = defWithoutLayout; for (const prop of ['align', 'center', 'spacing']) { if (def[prop] !== undefined) { - var _layout$prop; - - layout[prop] = (_layout$prop = layout[prop]) !== null && _layout$prop !== void 0 ? _layout$prop : {}; + layout[prop] ?? (layout[prop] = {}); layout[prop][channel] = def[prop]; } } } } @@ -8662,11 +8524,11 @@ ...channelDef.condition } }; } else if (channelDef || channelDef === null) { merged[channel] = channelDef; - } else if (layer || isValueDef(parentChannelDef) || isSignalRef(parentChannelDef) || isFieldOrDatumDef(parentChannelDef) || isArray(parentChannelDef)) { + } else if (layer || isValueDef(parentChannelDef) || isSignalRef(parentChannelDef) || isFieldOrDatumDef(parentChannelDef) || vegaUtil.isArray(parentChannelDef)) { merged[channel] = parentChannelDef; } } } else { merged = encoding; @@ -8686,13 +8548,348 @@ parentProjection, projection })); } - return projection !== null && projection !== void 0 ? projection : parentProjection; + return projection ?? parentProjection; } + function isFilter(t) { + return 'filter' in t; + } + function isImputeSequence(t) { + return (t === null || t === void 0 ? 
void 0 : t['stop']) !== undefined; + } + function isLookup(t) { + return 'lookup' in t; + } + function isLookupData(from) { + return 'data' in from; + } + function isLookupSelection(from) { + return 'param' in from; + } + function isPivot(t) { + return 'pivot' in t; + } + function isDensity(t) { + return 'density' in t; + } + function isQuantile(t) { + return 'quantile' in t; + } + function isRegression(t) { + return 'regression' in t; + } + function isLoess(t) { + return 'loess' in t; + } + function isSample(t) { + return 'sample' in t; + } + function isWindow(t) { + return 'window' in t; + } + function isJoinAggregate(t) { + return 'joinaggregate' in t; + } + function isFlatten(t) { + return 'flatten' in t; + } + function isCalculate(t) { + return 'calculate' in t; + } + function isBin(t) { + return 'bin' in t; + } + function isImpute(t) { + return 'impute' in t; + } + function isTimeUnit(t) { + return 'timeUnit' in t; + } + function isAggregate(t) { + return 'aggregate' in t; + } + function isStack(t) { + return 'stack' in t; + } + function isFold(t) { + return 'fold' in t; + } + function normalizeTransform(transform) { + return transform.map(t => { + if (isFilter(t)) { + return { + filter: normalizeLogicalComposition(t.filter, normalizePredicate$1) + }; + } + + return t; + }); + } + + class SelectionCompatibilityNormalizer extends SpecMapper { + map(spec, normParams) { + normParams.emptySelections ?? (normParams.emptySelections = {}); + normParams.selectionPredicates ?? (normParams.selectionPredicates = {}); + spec = normalizeTransforms(spec, normParams); + return super.map(spec, normParams); + } + + mapLayerOrUnit(spec, normParams) { + spec = normalizeTransforms(spec, normParams); + + if (spec.encoding) { + const encoding = {}; + + for (const [channel, enc] of entries$1(spec.encoding)) { + encoding[channel] = normalizeChannelDef(enc, normParams); + } + + spec = { ...spec, + encoding + }; + } + + return super.mapLayerOrUnit(spec, normParams); + } + + mapUnit(spec, normParams) { + const { + selection, + ...rest + } = spec; + + if (selection) { + return { ...rest, + params: entries$1(selection).map(([name, selDef]) => { + const { + init: value, + bind, + empty, + ...select + } = selDef; + + if (select.type === 'single') { + select.type = 'point'; + select.toggle = false; + } else if (select.type === 'multi') { + select.type = 'point'; + } // Propagate emptiness forwards and backwards + + + normParams.emptySelections[name] = empty !== 'none'; + + for (const pred of vals(normParams.selectionPredicates[name] ?? {})) { + pred.empty = empty !== 'none'; + } + + return { + name, + value, + select, + bind + }; + }) + }; + } + + return spec; + } + + } + + function normalizeTransforms(spec, normParams) { + const { + transform: tx, + ...rest + } = spec; + + if (tx) { + const transform = tx.map(t => { + if (isFilter(t)) { + return { + filter: normalizePredicate(t, normParams) + }; + } else if (isBin(t) && isBinParams(t.bin)) { + return { ...t, + bin: normalizeBinExtent(t.bin) + }; + } else if (isLookup(t)) { + const { + selection: param, + ...from + } = t.from; + return param ? 
{ ...t, + from: { + param, + ...from + } + } : t; + } + + return t; + }); + return { ...rest, + transform + }; + } + + return spec; + } + + function normalizeChannelDef(obj, normParams) { + var _enc$scale, _enc$scale$domain; + + const enc = duplicate(obj); + + if (isFieldDef(enc) && isBinParams(enc.bin)) { + enc.bin = normalizeBinExtent(enc.bin); + } + + if (isScaleFieldDef(enc) && (_enc$scale = enc.scale) !== null && _enc$scale !== void 0 && (_enc$scale$domain = _enc$scale.domain) !== null && _enc$scale$domain !== void 0 && _enc$scale$domain.selection) { + const { + selection: param, + ...domain + } = enc.scale.domain; + enc.scale.domain = { ...domain, + ...(param ? { + param + } : {}) + }; + } + + if (isConditionalDef(enc)) { + if (vega.isArray(enc.condition)) { + enc.condition = enc.condition.map(c => { + const { + selection, + param, + test, + ...cond + } = c; + return param ? c : { ...cond, + test: normalizePredicate(c, normParams) + }; + }); + } else { + const { + selection, + param, + test, + ...cond + } = normalizeChannelDef(enc.condition, normParams); + enc.condition = param ? enc.condition : { ...cond, + test: normalizePredicate(enc.condition, normParams) + }; + } + } + + return enc; + } + + function normalizeBinExtent(bin) { + const ext = bin.extent; + + if (ext !== null && ext !== void 0 && ext.selection) { + const { + selection: param, + ...rest + } = ext; + return { ...bin, + extent: { ...rest, + param + } + }; + } + + return bin; + } + + function normalizePredicate(op, normParams) { + // Normalize old compositions of selection names (e.g., selection: {and: ["one", "two"]}) + const normalizeSelectionComposition = o => { + return normalizeLogicalComposition(o, param => { + var _normParams$selection; + + const empty = normParams.emptySelections[param] ?? true; + const pred = { + param, + empty + }; + (_normParams$selection = normParams.selectionPredicates)[param] ?? (_normParams$selection[param] = []); + normParams.selectionPredicates[param].push(pred); + return pred; + }); + }; + + return op.selection ? normalizeSelectionComposition(op.selection) : normalizeLogicalComposition(op.test || op.filter, o => o.selection ? normalizeSelectionComposition(o.selection) : o); + } + + class TopLevelSelectionsNormalizer extends SpecMapper { + map(spec, normParams) { + const selections = normParams.selections ?? []; + + if (spec.params && !isUnitSpec(spec)) { + const params = []; + + for (const param of spec.params) { + if (isSelectionParameter(param)) { + selections.push(param); + } else { + params.push(param); + } + } + + spec.params = params; + } + + normParams.selections = selections; + return super.map(spec, addSpecNameToParams(spec, normParams)); + } + + mapUnit(spec, normParams) { + const selections = normParams.selections; + if (!selections || !selections.length) return spec; + const path = (normParams.path ?? []).concat(spec.name); + const params = []; + + for (const selection of selections) { + // By default, apply selections to all unit views. + if (!selection.views || !selection.views.length) { + params.push(selection); + } else { + for (const view of selection.views) { + // view is either a specific unit name, or a partial path through the spec tree. 
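    /*
     * Editorial note (illustrative sketch, not part of the bundled source): given a
     * top-level selection parameter such as
     *   {name: 'brush', select: 'interval', views: ['detail']}
     * the check below copies it only into unit specs named 'detail' or whose path
     * through the composition tree contains 'detail'; for an array-valued view,
     * every listed name must occur along the path in order. The names are made up
     * for illustration.
     */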
+ if (vega.isString(view) && (view === spec.name || path.indexOf(view) >= 0) || vega.isArray(view) && view.map(v => path.indexOf(v)).every((v, i, arr) => v !== -1 && (i === 0 || v > arr[i - 1]))) { + params.push(selection); + } + } + } + } + + if (params.length) spec.params = params; + return spec; + } + + } + + for (const method of ['mapFacet', 'mapRepeat', 'mapHConcat', 'mapVConcat', 'mapLayer']) { + const proto = TopLevelSelectionsNormalizer.prototype[method]; + + TopLevelSelectionsNormalizer.prototype[method] = function (spec, params) { + return proto.call(this, spec, addSpecNameToParams(spec, params)); + }; + } + + function addSpecNameToParams(spec, params) { + return spec.name ? { ...params, + path: (params.path ?? []).concat(spec.name) + } : params; + } + function normalize(spec, config) { if (config === undefined) { config = initConfig(spec.config); } @@ -8710,25 +8907,29 @@ ...(autosize ? { autosize } : {}) }; } - const normalizer = new CoreNormalizer(); + const coreNormalizer = new CoreNormalizer(); + const selectionCompatNormalizer = new SelectionCompatibilityNormalizer(); + const topLevelSelectionNormalizer = new TopLevelSelectionsNormalizer(); /** * Decompose extended unit specs into composition of pure unit specs. + * And push top-level selection definitions down to unit specs. */ function normalizeGenericSpec(spec, config = {}) { - return normalizer.map(spec, { + const normParams = { config - }); + }; + return topLevelSelectionNormalizer.map(coreNormalizer.map(selectionCompatNormalizer.map(spec, normParams), normParams), normParams); } function _normalizeAutoSize(autosize) { - return isString(autosize) ? { + return vegaUtil.isString(autosize) ? { type: autosize - } : autosize !== null && autosize !== void 0 ? autosize : {}; + } : autosize ?? {}; } /** * Normalize autosize and deal with width or height == "container". */ @@ -8813,12 +9014,10 @@ clone() { return new Split(duplicate(this.explicit), duplicate(this.implicit)); } combine() { - // FIXME remove "as any". - // Add "as any" to avoid an error "Spread types may only be created from object types". return { ...this.explicit, // Explicit properties comes first ...this.implicit }; } @@ -8846,28 +9045,34 @@ explicit: false, value: undefined }; } - setWithExplicit(key, value) { - if (value.value !== undefined) { - this.set(key, value.value, value.explicit); + setWithExplicit(key, { + value, + explicit + }) { + if (value !== undefined) { + this.set(key, value, explicit); } } set(key, value, explicit) { delete this[explicit ? 'implicit' : 'explicit'][key]; this[explicit ? 'explicit' : 'implicit'][key] = value; return this; } - copyKeyFromSplit(key, s) { + copyKeyFromSplit(key, { + explicit, + implicit + }) { // Explicit has higher precedence - if (s.explicit[key] !== undefined) { - this.set(key, s.explicit[key], true); - } else if (s.implicit[key] !== undefined) { - this.set(key, s.implicit[key], false); + if (explicit[key] !== undefined) { + this.set(key, explicit[key], true); + } else if (implicit[key] !== undefined) { + this.set(key, implicit[key], false); } } copyKeyFromObject(key, s) { // Explicit has higher precedence @@ -8993,95 +9198,20 @@ DataSourceType[DataSourceType["Row"] = 2] = "Row"; DataSourceType[DataSourceType["Column"] = 3] = "Column"; DataSourceType[DataSourceType["Lookup"] = 4] = "Lookup"; })(DataSourceType || (DataSourceType = {})); - function isFilter(t) { - return 'filter' in t; - } - function isImputeSequence(t) { - return (t === null || t === void 0 ? 
void 0 : t['stop']) !== undefined; - } - function isLookup(t) { - return 'lookup' in t; - } - function isLookupData(from) { - return 'data' in from; - } - function isLookupSelection(from) { - return 'selection' in from; - } - function isPivot(t) { - return 'pivot' in t; - } - function isDensity(t) { - return 'density' in t; - } - function isQuantile(t) { - return 'quantile' in t; - } - function isRegression(t) { - return 'regression' in t; - } - function isLoess(t) { - return 'loess' in t; - } - function isSample(t) { - return 'sample' in t; - } - function isWindow(t) { - return 'window' in t; - } - function isJoinAggregate(t) { - return 'joinaggregate' in t; - } - function isFlatten(t) { - return 'flatten' in t; - } - function isCalculate(t) { - return 'calculate' in t; - } - function isBin(t) { - return 'bin' in t; - } - function isImpute(t) { - return 'impute' in t; - } - function isTimeUnit(t) { - return 'timeUnit' in t; - } - function isAggregate$1(t) { - return 'aggregate' in t; - } - function isStack(t) { - return 'stack' in t; - } - function isFold(t) { - return 'fold' in t; - } - function normalizeTransform(transform) { - return transform.map(t => { - if (isFilter(t)) { - return { - filter: normalizeLogicalComposition(t.filter, normalizePredicate) - }; - } - - return t; - }); - } - const VIEW = 'view', LBRACK = '[', RBRACK = ']', LBRACE = '{', RBRACE = '}', COLON = ':', COMMA = ',', NAME = '@', GT = '>', - ILLEGAL = /[[\]{}]/, + ILLEGAL$1 = /[[\]{}]/, DEFAULT_MARKS = { '*': 1, arc: 1, area: 1, group: 1, @@ -9245,11 +9375,11 @@ if (i < n - 1 && s[++i] !== LBRACK) throw 'Expected left bracket: ' + s; start = ++i; } // marshall event stream specification - if (!(n = source.length) || ILLEGAL.test(source[n - 1])) { + if (!(n = source.length) || ILLEGAL$1.test(source[n - 1])) { throw 'Invalid event selector: ' + s; } if (n > 1) { stream.type = source[1]; @@ -9284,29 +9414,1133 @@ if (x !== x) throw s; return x; }); } + function assembleInit(init, isExpr = true, wrap = vegaUtil.identity) { + if (vegaUtil.isArray(init)) { + const assembled = init.map(v => assembleInit(v, isExpr, wrap)); + return isExpr ? `[${assembled.join(', ')}]` : assembled; + } else if (isDateTime(init)) { + if (isExpr) { + return wrap(dateTimeToExpr(init)); + } else { + return wrap(dateTimeToTimestamp(init)); + } + } + + return isExpr ? wrap(stringify(init)) : init; + } + function assembleUnitSelectionSignals(model, signals) { + for (const selCmpt of vals(model.component.selection ?? {})) { + const name = selCmpt.name; + let modifyExpr = `${name}${TUPLE}, ${selCmpt.resolve === 'global' ? 'true' : `{unit: ${unitName(model)}}`}`; + + for (const c of selectionCompilers) { + if (!c.defined(selCmpt)) continue; + if (c.signals) signals = c.signals(model, selCmpt, signals); + if (c.modifyExpr) modifyExpr = c.modifyExpr(model, selCmpt, modifyExpr); + } + + signals.push({ + name: name + MODIFY, + on: [{ + events: { + signal: selCmpt.name + TUPLE + }, + update: `modify(${vegaUtil.stringValue(selCmpt.name + STORE)}, ${modifyExpr})` + }] + }); + } + + return cleanupEmptyOnArray(signals); + } + function assembleFacetSignals(model, signals) { + if (model.component.selection && keys(model.component.selection).length) { + const name = vegaUtil.stringValue(model.getName('cell')); + signals.unshift({ + name: 'facet', + value: {}, + on: [{ + events: eventSelector('mousemove', 'scope'), + update: `isTuple(facet) ? 
facet : group(${name}).datum` + }] + }); + } + + return cleanupEmptyOnArray(signals); + } + function assembleTopLevelSignals(model, signals) { + let hasSelections = false; + + for (const selCmpt of vals(model.component.selection ?? {})) { + const name = selCmpt.name; + const store = vegaUtil.stringValue(name + STORE); + const hasSg = signals.filter(s => s.name === name); + + if (hasSg.length === 0) { + const resolve = selCmpt.resolve === 'global' ? 'union' : selCmpt.resolve; + const isPoint = selCmpt.type === 'point' ? ', true, true)' : ')'; + signals.push({ + name: selCmpt.name, + update: `${VL_SELECTION_RESOLVE}(${store}, ${vegaUtil.stringValue(resolve)}${isPoint}` + }); + } + + hasSelections = true; + + for (const c of selectionCompilers) { + if (c.defined(selCmpt) && c.topLevelSignals) { + signals = c.topLevelSignals(model, selCmpt, signals); + } + } + } + + if (hasSelections) { + const hasUnit = signals.filter(s => s.name === 'unit'); + + if (hasUnit.length === 0) { + signals.unshift({ + name: 'unit', + value: {}, + on: [{ + events: 'mousemove', + update: 'isTuple(group()) ? group() : unit' + }] + }); + } + } + + return cleanupEmptyOnArray(signals); + } + function assembleUnitSelectionData(model, data) { + const dataCopy = [...data]; + + for (const selCmpt of vals(model.component.selection ?? {})) { + const init = { + name: selCmpt.name + STORE + }; + + if (selCmpt.init) { + const fields = selCmpt.project.items.map(proj => { + const { + signals, + ...rest + } = proj; + return rest; + }); + init.values = selCmpt.init.map(i => ({ + unit: unitName(model, { + escape: false + }), + fields, + values: assembleInit(i, false) + })); + } + + const contains = dataCopy.filter(d => d.name === selCmpt.name + STORE); + + if (!contains.length) { + dataCopy.push(init); + } + } + + return dataCopy; + } + function assembleUnitSelectionMarks(model, marks) { + for (const selCmpt of vals(model.component.selection ?? {})) { + for (const c of selectionCompilers) { + if (c.defined(selCmpt) && c.marks) { + marks = c.marks(model, selCmpt, marks); + } + } + } + + return marks; + } + function assembleLayerSelectionMarks(model, marks) { + for (const child of model.children) { + if (isUnitModel(child)) { + marks = assembleUnitSelectionMarks(child, marks); + } + } + + return marks; + } + function assembleSelectionScaleDomain(model, extent, scaleCmpt, domain) { + const parsedExtent = parseSelectionExtent(model, extent.param, extent); + return { + signal: hasContinuousDomain(scaleCmpt.get('type')) && vegaUtil.isArray(domain) && domain[0] > domain[1] ? `isValid(${parsedExtent}) && reverse(${parsedExtent})` : parsedExtent + }; + } + + function cleanupEmptyOnArray(signals) { + return signals.map(s => { + if (s.on && !s.on.length) delete s.on; + return s; + }); + } + /** + * A node in the dataflow tree. + */ + + class DataFlowNode { + constructor(parent, debugName) { + this.debugName = debugName; + + _defineProperty(this, "_children", []); + + _defineProperty(this, "_parent", null); + + _defineProperty(this, "_hash", void 0); + + if (parent) { + this.parent = parent; + } + } + /** + * Clone this node with a deep copy but don't clone links to children or parents. + */ + + + clone() { + throw new Error('Cannot clone node'); + } + /** + * Return a hash of the node. + */ + + + get parent() { + return this._parent; + } + /** + * Set the parent of the node and also add this node to the parent's children. 
+ */ + + + set parent(parent) { + this._parent = parent; + + if (parent) { + parent.addChild(this); + } + } + + get children() { + return this._children; + } + + numChildren() { + return this._children.length; + } + + addChild(child, loc) { + // do not add the same child twice + if (this._children.includes(child)) { + warn(ADD_SAME_CHILD_TWICE); + return; + } + + if (loc !== undefined) { + this._children.splice(loc, 0, child); + } else { + this._children.push(child); + } + } + + removeChild(oldChild) { + const loc = this._children.indexOf(oldChild); + + this._children.splice(loc, 1); + + return loc; + } + /** + * Remove node from the dataflow. + */ + + + remove() { + let loc = this._parent.removeChild(this); + + for (const child of this._children) { + // do not use the set method because we want to insert at a particular location + child._parent = this._parent; + + this._parent.addChild(child, loc++); + } + } + /** + * Insert another node as a parent of this node. + */ + + + insertAsParentOf(other) { + const parent = other.parent; + parent.removeChild(this); + this.parent = parent; + other.parent = this; + } + + swapWithParent() { + const parent = this._parent; + const newParent = parent.parent; // reconnect the children + + for (const child of this._children) { + child.parent = parent; + } // remove old links + + + this._children = []; // equivalent to removing every child link one by one + + parent.removeChild(this); + parent.parent.removeChild(parent); // swap two nodes + + this.parent = newParent; + parent.parent = this; + } + + } + class OutputNode extends DataFlowNode { + clone() { + const cloneObj = new this.constructor(); + cloneObj.debugName = `clone_${this.debugName}`; + cloneObj._source = this._source; + cloneObj._name = `clone_${this._name}`; + cloneObj.type = this.type; + cloneObj.refCounts = this.refCounts; + cloneObj.refCounts[cloneObj._name] = 0; + return cloneObj; + } + /** + * @param source The name of the source. Will change in assemble. + * @param type The type of the output node. + * @param refCounts A global ref counter map. + */ + + + constructor(parent, source, type, refCounts) { + super(parent, source); + this.type = type; + this.refCounts = refCounts; + + _defineProperty(this, "_source", void 0); + + _defineProperty(this, "_name", void 0); + + this._source = this._name = source; + + if (this.refCounts && !(this._name in this.refCounts)) { + this.refCounts[this._name] = 0; + } + } + + dependentFields() { + return new Set(); + } + + producedFields() { + return new Set(); + } + + hash() { + if (this._hash === undefined) { + this._hash = `Output ${uniqueId()}`; + } + + return this._hash; + } + /** + * Request the datasource name and increase the ref counter. + * + * During the parsing phase, this will return the simple name such as 'main' or 'raw'. + * It is crucial to request the name from an output node to mark it as a required node. + * If nobody ever requests the name, this datasource will not be instantiated in the assemble phase. + * + * In the assemble phase, this will return the correct name. 
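   *
   * A sketch for illustration only (not from the original source), based on the
   * constructor above and the accessors below; the names are hypothetical:
   *
   *   const refCounts = {};
   *   const out = new OutputNode(null, 'main', DataSourceType.Main, refCounts);
   *   out.getSource();   // returns 'main' and bumps refCounts['main'] to 1
   *   out.isRequired();  // true, so the data source is emitted during assembly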
+ */ + + + getSource() { + this.refCounts[this._name]++; + return this._source; + } + + isRequired() { + return !!this.refCounts[this._name]; + } + + setSource(source) { + this._source = source; + } + + } + + class TimeUnitNode extends DataFlowNode { + clone() { + return new TimeUnitNode(null, duplicate(this.formula)); + } + + constructor(parent, formula) { + super(parent); + this.formula = formula; + } + + static makeFromEncoding(parent, model) { + const formula = model.reduceFieldDef((timeUnitComponent, fieldDef) => { + const { + field, + timeUnit + } = fieldDef; + + if (timeUnit) { + const as = vgField(fieldDef, { + forAs: true + }); + timeUnitComponent[hash({ + as, + field, + timeUnit + })] = { + as, + field, + timeUnit + }; + } + + return timeUnitComponent; + }, {}); + + if (isEmpty(formula)) { + return null; + } + + return new TimeUnitNode(parent, formula); + } + + static makeFromTransform(parent, t) { + const { + timeUnit, + ...other + } = { ...t + }; + const normalizedTimeUnit = normalizeTimeUnit(timeUnit); + const component = { ...other, + timeUnit: normalizedTimeUnit + }; + return new TimeUnitNode(parent, { + [hash(component)]: component + }); + } + /** + * Merge together TimeUnitNodes assigning the children of `other` to `this` + * and removing `other`. + */ + + + merge(other) { + this.formula = { ...this.formula + }; // if the same hash happen twice, merge + + for (const key in other.formula) { + if (!this.formula[key]) { + // copy if it's not a duplicate + this.formula[key] = other.formula[key]; + } + } + + for (const child of other.children) { + other.removeChild(child); + child.parent = this; + } + + other.remove(); + } + /** + * Remove time units coming from the other node. + */ + + + removeFormulas(fields) { + const newFormula = {}; + + for (const [key, timeUnit] of entries$1(this.formula)) { + if (!fields.has(timeUnit.as)) { + newFormula[key] = timeUnit; + } + } + + this.formula = newFormula; + } + + producedFields() { + return new Set(vals(this.formula).map(f => f.as)); + } + + dependentFields() { + return new Set(vals(this.formula).map(f => f.field)); + } + + hash() { + return `TimeUnit ${hash(this.formula)}`; + } + + assemble() { + const transforms = []; + + for (const f of vals(this.formula)) { + const { + field, + as, + timeUnit + } = f; + const { + unit, + utc, + ...params + } = normalizeTimeUnit(timeUnit); + transforms.push({ + field: replacePathInField(field), + type: 'timeunit', + ...(unit ? { + units: getTimeUnitParts(unit) + } : {}), + ...(utc ? { + timezone: 'utc' + } : {}), + ...params, + as: [as, `${as}_end`] + }); + } + + return transforms; + } + + } + + const TUPLE_FIELDS = '_tuple_fields'; + /** + * Whether the selection tuples hold enumerated or ranged values for a field. + */ + + class SelectionProjectionComponent { + constructor(...items) { + _defineProperty(this, "hasChannel", void 0); + + _defineProperty(this, "hasField", void 0); + + _defineProperty(this, "timeUnit", void 0); + + _defineProperty(this, "items", void 0); + + this.items = items; + this.hasChannel = {}; + this.hasField = {}; + } + + } + const project = { + defined: () => { + return true; // This transform handles its own defaults, so always run parse. + }, + parse: (model, selCmpt, selDef) => { + const name = selCmpt.name; + const proj = selCmpt.project ?? (selCmpt.project = new SelectionProjectionComponent()); + const parsed = {}; + const timeUnits = {}; + const signals = new Set(); + + const signalName = (p, range) => { + const suffix = range === 'visual' ? 
p.channel : p.field; + let sg = varName(`${name}_${suffix}`); + + for (let counter = 1; signals.has(sg); counter++) { + sg = varName(`${name}_${suffix}_${counter}`); + } + + signals.add(sg); + return { + [range]: sg + }; + }; + + const type = selCmpt.type; + const cfg = model.config.selection[type]; + const init = selDef.value !== undefined ? vegaUtil.array(selDef.value) : null; // If no explicit projection (either fields or encodings) is specified, set some defaults. + // If an initial value is set, try to infer projections. + + let { + fields, + encodings + } = vegaUtil.isObject(selDef.select) ? selDef.select : {}; + + if (!fields && !encodings && init) { + for (const initVal of init) { + // initVal may be a scalar value to smoothen varParam -> pointSelection gradient. + if (!vegaUtil.isObject(initVal)) { + continue; + } + + for (const key of keys(initVal)) { + if (isSingleDefUnitChannel(key)) { + (encodings || (encodings = [])).push(key); + } else { + if (type === 'interval') { + warn(INTERVAL_INITIALIZED_WITH_X_Y); + encodings = cfg.encodings; + } else { + (fields || (fields = [])).push(key); + } + } + } + } + } // If no initial value is specified, use the default configuration. + // We break this out as a separate if block (instead of an else condition) + // to account for unprojected point selections that have scalar initial values + + + if (!fields && !encodings) { + encodings = cfg.encodings; + + if ('fields' in cfg) { + fields = cfg.fields; + } + } + + for (const channel of encodings ?? []) { + const fieldDef = model.fieldDef(channel); + + if (fieldDef) { + let field = fieldDef.field; + + if (fieldDef.aggregate) { + warn(cannotProjectAggregate(channel, fieldDef.aggregate)); + continue; + } else if (!field) { + warn(cannotProjectOnChannelWithoutField(channel)); + continue; + } + + if (fieldDef.timeUnit) { + field = model.vgField(channel); // Construct TimeUnitComponents which will be combined into a + // TimeUnitNode. This node may need to be inserted into the + // dataflow if the selection is used across views that do not + // have these time units defined. + + const component = { + timeUnit: fieldDef.timeUnit, + as: field, + field: fieldDef.field + }; + timeUnits[hash(component)] = component; + } // Prevent duplicate projections on the same field. + // TODO: what if the same field is bound to multiple channels (e.g., SPLOM diag). + + + if (!parsed[field]) { + // Determine whether the tuple will store enumerated or ranged values. + // Interval selections store ranges for continuous scales, and enumerations otherwise. + // Single/multi selections store ranges for binned fields, and enumerations otherwise. + let tplType = 'E'; + + if (type === 'interval') { + const scaleType = model.getScaleComponent(channel).get('type'); + + if (hasContinuousDomain(scaleType)) { + tplType = 'R'; + } + } else if (fieldDef.bin) { + tplType = 'R-RE'; + } + + const p = { + field, + channel, + type: tplType + }; + p.signals = { ...signalName(p, 'data'), + ...signalName(p, 'visual') + }; + proj.items.push(parsed[field] = p); + proj.hasField[field] = proj.hasChannel[channel] = parsed[field]; + } + } else { + warn(cannotProjectOnChannelWithoutField(channel)); + } + } + + for (const field of fields ?? 
[]) { + if (proj.hasField[field]) continue; + const p = { + type: 'E', + field + }; + p.signals = { ...signalName(p, 'data') + }; + proj.items.push(p); + proj.hasField[field] = p; + } + + if (init) { + selCmpt.init = init.map(v => { + // Selections can be initialized either with a full object that maps projections to values + // or scalar values to smoothen the abstraction gradient from variable params to point selections. + return proj.items.map(p => vegaUtil.isObject(v) ? v[p.channel] !== undefined ? v[p.channel] : v[p.field] : v); + }); + } + + if (!isEmpty(timeUnits)) { + proj.timeUnit = new TimeUnitNode(null, timeUnits); + } + }, + signals: (model, selCmpt, allSignals) => { + const name = selCmpt.name + TUPLE_FIELDS; + const hasSignal = allSignals.filter(s => s.name === name); + return hasSignal.length > 0 ? allSignals : allSignals.concat({ + name, + value: selCmpt.project.items.map(proj => { + const { + signals, + hasLegend, + ...rest + } = proj; + rest.field = replacePathInField(rest.field); + return rest; + }) + }); + } + }; + + const scaleBindings = { + defined: selCmpt => { + return selCmpt.type === 'interval' && selCmpt.resolve === 'global' && selCmpt.bind && selCmpt.bind === 'scales'; + }, + parse: (model, selCmpt) => { + const bound = selCmpt.scales = []; + + for (const proj of selCmpt.project.items) { + const channel = proj.channel; + + if (!isScaleChannel(channel)) { + continue; + } + + const scale = model.getScaleComponent(channel); + const scaleType = scale ? scale.get('type') : undefined; + + if (!scale || !hasContinuousDomain(scaleType)) { + warn(SCALE_BINDINGS_CONTINUOUS); + continue; + } + + scale.set('selectionExtent', { + param: selCmpt.name, + field: proj.field + }, true); + bound.push(proj); + } + }, + topLevelSignals: (model, selCmpt, signals) => { + const bound = selCmpt.scales.filter(proj => signals.filter(s => s.name === proj.signals.data).length === 0); // Top-level signals are only needed for multiview displays and if this + // view's top-level signals haven't already been generated. + + if (!model.parent || isTopLevelLayer(model) || bound.length === 0) { + return signals; + } // vlSelectionResolve does not account for the behavior of bound scales in + // multiview displays. Each unit view adds a tuple to the store, but the + // state of the selection is the unit selection most recently updated. This + // state is captured by the top-level signals that we insert and "push + // outer" to from within the units. We need to reassemble this state into + // the top-level named signal, except no single selCmpt has a global view. + + + const namedSg = signals.filter(s => s.name === selCmpt.name)[0]; + let update = namedSg.update; + + if (update.indexOf(VL_SELECTION_RESOLVE) >= 0) { + namedSg.update = `{${bound.map(proj => `${vegaUtil.stringValue(replacePathInField(proj.field))}: ${proj.signals.data}`).join(', ')}}`; + } else { + for (const proj of bound) { + const mapping = `${vegaUtil.stringValue(replacePathInField(proj.field))}: ${proj.signals.data}`; + + if (!update.includes(mapping)) { + update = `${update.substring(0, update.length - 1)}, ${mapping}}`; + } + } + + namedSg.update = update; + } + + return signals.concat(bound.map(proj => ({ + name: proj.signals.data + }))); + }, + signals: (model, selCmpt, signals) => { + // Nested signals need only push to top-level signals with multiview displays. 
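// For illustration only (not part of the compiled bundle; dataset and field names
// are just examples): this compiler handles interval selections bound to scales,
// i.e. pan/zoom. A minimal spec that exercises it looks roughly like:
//
//   {
//     "data": {"url": "data/cars.json"},
//     "params": [{"name": "grid", "select": "interval", "bind": "scales"}],
//     "mark": "point",
//     "encoding": {
//       "x": {"field": "Horsepower", "type": "quantitative"},
//       "y": {"field": "Miles_per_Gallon", "type": "quantitative"}
//     }
//   }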
+ if (model.parent && !isTopLevelLayer(model)) { + for (const proj of selCmpt.scales) { + const signal = signals.filter(s => s.name === proj.signals.data)[0]; + signal.push = 'outer'; + delete signal.value; + delete signal.update; + } + } + + return signals; + } + }; + function domain(model, channel) { + const scale = vegaUtil.stringValue(model.scaleName(channel)); + return `domain(${scale})`; + } + + function isTopLevelLayer(model) { + return model.parent && isLayerModel(model.parent) && (!model.parent.parent ?? isTopLevelLayer(model.parent.parent)); + } + + const BRUSH = '_brush'; + const SCALE_TRIGGER = '_scale_trigger'; + const interval = { + defined: selCmpt => selCmpt.type === 'interval', + signals: (model, selCmpt, signals) => { + const name = selCmpt.name; + const fieldsSg = name + TUPLE_FIELDS; + const hasScales = scaleBindings.defined(selCmpt); + const init = selCmpt.init ? selCmpt.init[0] : null; + const dataSignals = []; + const scaleTriggers = []; + + if (selCmpt.translate && !hasScales) { + const filterExpr = `!event.item || event.item.mark.name !== ${vegaUtil.stringValue(name + BRUSH)}`; + events(selCmpt, (on, evt) => { + var _evt$between$; + + const filters = vegaUtil.array((_evt$between$ = evt.between[0]).filter ?? (_evt$between$.filter = [])); + + if (!filters.includes(filterExpr)) { + filters.push(filterExpr); + } + + return on; + }); + } + + selCmpt.project.items.forEach((proj, i) => { + const channel = proj.channel; + + if (channel !== X && channel !== Y) { + warn('Interval selections only support x and y encoding channels.'); + return; + } + + const val = init ? init[i] : null; + const cs = channelSignals(model, selCmpt, proj, val); + const dname = proj.signals.data; + const vname = proj.signals.visual; + const scaleName = vegaUtil.stringValue(model.scaleName(channel)); + const scaleType = model.getScaleComponent(channel).get('type'); + const toNum = hasContinuousDomain(scaleType) ? '+' : ''; + signals.push(...cs); + dataSignals.push(dname); + scaleTriggers.push({ + scaleName: model.scaleName(channel), + expr: `(!isArray(${dname}) || ` + `(${toNum}invert(${scaleName}, ${vname})[0] === ${toNum}${dname}[0] && ` + `${toNum}invert(${scaleName}, ${vname})[1] === ${toNum}${dname}[1]))` + }); + }); // Proxy scale reactions to ensure that an infinite loop doesn't occur + // when an interval selection filter touches the scale. + + if (!hasScales && scaleTriggers.length) { + signals.push({ + name: name + SCALE_TRIGGER, + value: {}, + on: [{ + events: scaleTriggers.map(t => ({ + scale: t.scaleName + })), + update: `${scaleTriggers.map(t => t.expr).join(' && ')} ? ${name + SCALE_TRIGGER} : {}` + }] + }); + } // Only add an interval to the store if it has valid data extents. Data extents + // are set to null if pixel extents are equal to account for intervals over + // ordinal/nominal domains which, when inverted, will still produce a valid datum. + + + const update = `unit: ${unitName(model)}, fields: ${fieldsSg}, values`; + return signals.concat({ + name: name + TUPLE, + ...(init ? { + init: `{${update}: ${assembleInit(init)}}` + } : {}), + ...(dataSignals.length ? { + on: [{ + events: [{ + signal: dataSignals.join(' || ') + }], + // Prevents double invocation, see https://github.com/vega/vega#1672. + update: `${dataSignals.join(' && ')} ? 
{${update}: [${dataSignals}]} : null` + }] + } : {}) + }); + }, + marks: (model, selCmpt, marks) => { + const name = selCmpt.name; + const { + x, + y + } = selCmpt.project.hasChannel; + const xvname = x && x.signals.visual; + const yvname = y && y.signals.visual; + const store = `data(${vegaUtil.stringValue(selCmpt.name + STORE)})`; // Do not add a brush if we're binding to scales + // or we don't have a valid interval projection + + if (scaleBindings.defined(selCmpt) || !x && !y) { + return marks; + } + + const update = { + x: x !== undefined ? { + signal: `${xvname}[0]` + } : { + value: 0 + }, + y: y !== undefined ? { + signal: `${yvname}[0]` + } : { + value: 0 + }, + x2: x !== undefined ? { + signal: `${xvname}[1]` + } : { + field: { + group: 'width' + } + }, + y2: y !== undefined ? { + signal: `${yvname}[1]` + } : { + field: { + group: 'height' + } + } + }; // If the selection is resolved to global, only a single interval is in + // the store. Wrap brush mark's encodings with a production rule to test + // this based on the `unit` property. Hide the brush mark if it corresponds + // to a unit different from the one in the store. + + if (selCmpt.resolve === 'global') { + for (const key of keys(update)) { + update[key] = [{ + test: `${store}.length && ${store}[0].unit === ${unitName(model)}`, + ...update[key] + }, { + value: 0 + }]; + } + } // Two brush marks ensure that fill colors and other aesthetic choices do + // not interefere with the core marks, but that the brushed region can still + // be interacted with (e.g., dragging it around). + + + const { + fill, + fillOpacity, + cursor, + ...stroke + } = selCmpt.mark; + const vgStroke = keys(stroke).reduce((def, k) => { + def[k] = [{ + test: [x !== undefined && `${xvname}[0] !== ${xvname}[1]`, y !== undefined && `${yvname}[0] !== ${yvname}[1]`].filter(t => t).join(' && '), + value: stroke[k] + }, { + value: null + }]; + return def; + }, {}); + return [{ + name: `${name + BRUSH}_bg`, + type: 'rect', + clip: true, + encode: { + enter: { + fill: { + value: fill + }, + fillOpacity: { + value: fillOpacity + } + }, + update: update + } + }, ...marks, { + name: name + BRUSH, + type: 'rect', + clip: true, + encode: { + enter: { ...(cursor ? { + cursor: { + value: cursor + } + } : {}), + fill: { + value: 'transparent' + } + }, + update: { ...update, + ...vgStroke + } + } + }]; + } + }; + /** + * Returns the visual and data signals for an interval selection. + */ + + function channelSignals(model, selCmpt, proj, init) { + const channel = proj.channel; + const vname = proj.signals.visual; + const dname = proj.signals.data; + const hasScales = scaleBindings.defined(selCmpt); + const scaleName = vegaUtil.stringValue(model.scaleName(channel)); + const scale = model.getScaleComponent(channel); + const scaleType = scale ? scale.get('type') : undefined; + + const scaled = str => `scale(${scaleName}, ${str})`; + + const size = model.getSizeSignalRef(channel === X ? 'width' : 'height').signal; + const coord = `${channel}(unit)`; + const on = events(selCmpt, (def, evt) => { + return [...def, { + events: evt.between[0], + update: `[${coord}, ${coord}]` + }, // Brush Start + { + events: evt, + update: `[${vname}[0], clamp(${coord}, 0, ${size})]` + } // Brush End + ]; + }); // React to pan/zooms of continuous scales. Non-continuous scales + // (band, point) cannot be pan/zoomed and any other changes + // to their domains (e.g., filtering) should clear the brushes. 
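// For illustration only (not part of the compiled bundle; the numbers are just
// examples): the visual/data signal pair assembled here also honors initial
// values, so a brush can start out already drawn:
//
//   {"params": [{
//     "name": "brush",
//     "select": "interval",
//     "value": {"x": [55, 160], "y": [13, 37]}
//   }]}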
+ + on.push({ + events: { + signal: selCmpt.name + SCALE_TRIGGER + }, + update: hasContinuousDomain(scaleType) ? `[${scaled(`${dname}[0]`)}, ${scaled(`${dname}[1]`)}]` : `[0, 0]` + }); + return hasScales ? [{ + name: dname, + on: [] + }] : [{ + name: vname, + ...(init ? { + init: assembleInit(init, true, scaled) + } : { + value: [] + }), + on: on + }, { + name: dname, + ...(init ? { + init: assembleInit(init) + } : {}), + // Cannot be `value` as `init` may require datetime exprs. + on: [{ + events: { + signal: vname + }, + update: `${vname}[0] === ${vname}[1] ? null : invert(${scaleName}, ${vname})` + }] + }]; + } + + function events(selCmpt, cb) { + return selCmpt.events.reduce((on, evt) => { + if (!evt.between) { + warn(`${evt} is not an ordered event stream for interval selections.`); + return on; + } + + return cb(on, evt); + }, []); + } + + const point$1 = { + defined: selCmpt => selCmpt.type === 'point', + signals: (model, selCmpt, signals) => { + const name = selCmpt.name; + const fieldsSg = name + TUPLE_FIELDS; + const project = selCmpt.project; + const datum = '(item().isVoronoi ? datum.datum : datum)'; + const values = project.items.map(p => { + const fieldDef = model.fieldDef(p.channel); // Binned fields should capture extents, for a range test against the raw field. + + return fieldDef !== null && fieldDef !== void 0 && fieldDef.bin ? `[${datum}[${vegaUtil.stringValue(model.vgField(p.channel, {}))}], ` + `${datum}[${vegaUtil.stringValue(model.vgField(p.channel, { + binSuffix: 'end' + }))}]]` : `${datum}[${vegaUtil.stringValue(p.field)}]`; + }).join(', '); // Only add a discrete selection to the store if a datum is present _and_ + // the interaction isn't occurring on a group mark. This guards against + // polluting interactive state with invalid values in faceted displays + // as the group marks are also data-driven. We force the update to account + // for constant null states but varying toggles (e.g., shift-click in + // whitespace followed by a click in whitespace; the store should only + // be cleared on the second click). + + const update = `unit: ${unitName(model)}, fields: ${fieldsSg}, values`; + const events = selCmpt.events; + const brushes = vals(model.component.selection ?? {}).reduce((acc, cmpt) => { + return cmpt.type === 'interval' ? acc.concat(cmpt.name + BRUSH) : acc; + }, []).map(b => `indexof(item().mark.name, '${b}') < 0`).join(' && '); + const test = `datum && item().mark.marktype !== 'group'` + (brushes ? ` && ${brushes}` : ''); + return signals.concat([{ + name: name + TUPLE, + on: events ? [{ + events, + update: `${test} ? {${update}: [${values}]} : null`, + force: true + }] : [] + }]); + } + }; + + /** * Return a mixin that includes a Vega production rule for a Vega-Lite conditional channel definition * or a simple mixin if channel def has no condition. */ function wrapCondition(model, channelDef, vgChannel, refFn) { const condition = isConditionalDef(channelDef) && channelDef.condition; const valueRef = refFn(channelDef); if (condition) { - const conditions = array(condition); + const conditions = vegaUtil.array(condition); const vgConditions = conditions.map(c => { const conditionValueRef = refFn(c); - const test = isConditionalSelection(c) ? 
parseSelectionPredicate(model, c.selection) // FIXME: remove casting once TS is no longer dumb about it - : expression(model, c.test); // FIXME: remove casting once TS is no longer dumb about it - return { - test, - ...conditionValueRef - }; + if (isConditionalParameter(c)) { + const { + param, + empty + } = c; + const test = parseSelectionPredicate(model, { + param, + empty + }); + return { + test, + ...conditionValueRef + }; + } else { + const test = expression(model, c.test); // FIXME: remove casting once TS is no longer dumb about it + + return { + test, + ...conditionValueRef + }; + } }); return { [vgChannel]: [...vgConditions, ...(valueRef !== undefined ? [valueRef] : [])] }; } else { @@ -9314,11 +10548,11 @@ [vgChannel]: valueRef } : {}; } } - function text(model, channel = 'text') { + function text$1(model, channel = 'text') { const channelDef = model.encoding[channel]; return wrapCondition(model, channelDef, channel, cDef => textRef(cDef, model.config)); } function textRef(channelDef, config, expr = 'datum') { // text @@ -9352,11 +10586,11 @@ config, stack } = model; const channelDef = encoding.tooltip; - if (isArray(channelDef)) { + if (vegaUtil.isArray(channelDef)) { return { tooltip: tooltipRefForEncoding({ tooltip: channelDef }, stack, config, opt) }; @@ -9381,15 +10615,15 @@ markTooltip = { content: 'encoding' }; } - if (isString(markTooltip)) { + if (vegaUtil.isString(markTooltip)) { return { value: markTooltip }; - } else if (isObject(markTooltip)) { + } else if (vegaUtil.isObject(markTooltip)) { // `tooltip` is `{fields: 'encodings' | 'fields'}` if (isSignalRef(markTooltip)) { return markTooltip; } else if (markTooltip.content === 'encoding') { return tooltipRefForEncoding(encoding, stack, config, opt); @@ -9410,19 +10644,17 @@ const toSkip = {}; const expr = reactiveGeom ? 'datum.datum' : 'datum'; const tuples = []; function add(fDef, channel) { - var _value; - const mainChannel = getMainRangeChannel(channel); const fieldDef = isTypedFieldDef(fDef) ? fDef : { ...fDef, type: encoding[mainChannel].type // for secondary field def, copy type from main channel }; const title = fieldDef.title || defaultTitle(fieldDef, config); - const key = array(title).join(', '); + const key = vegaUtil.array(title).join(', '); let value; if (isXorY(channel)) { const channel2 = channel === 'x' ? 'x2' : 'y2'; const fieldDef2 = getFieldDef(encoding[channel2]); @@ -9454,11 +10686,11 @@ normalizeStack: true }).signal; } } - value = (_value = value) !== null && _value !== void 0 ? _value : textRef(fieldDef, config, expr).signal; + value ?? (value = textRef(fieldDef, config, expr).signal); tuples.push({ channel, key, value }); @@ -9489,13 +10721,13 @@ reactiveGeom } = {}) { const data = tooltipData(encoding, stack, config, { reactiveGeom }); - const keyValues = entries(data).map(([key, value]) => "\"".concat(key, "\": ").concat(value)); + const keyValues = entries$1(data).map(([key, value]) => `"${key}": ${value}`); return keyValues.length > 0 ? { - signal: "{".concat(keyValues.join(', '), "}") + signal: `{${keyValues.join(', ')}}` } : undefined; } function aria(model) { const { @@ -9578,11 +10810,11 @@ return undefined; } return { description: { - signal: entries(data).map(([key, value], index) => "\"".concat(index > 0 ? '; ' : '').concat(key, ": \" + (").concat(value, ")")).join(' + ') + signal: entries$1(data).map(([key, value], index) => `"${index > 0 ? 
'; ' : ''}${key}: " + (${value})`).join(' + ') } }; } /** @@ -9602,25 +10834,23 @@ defaultRef, defaultValue } = opt; if (defaultRef === undefined) { - var _defaultValue; - // prettier-ignore - defaultValue = (_defaultValue = defaultValue) !== null && _defaultValue !== void 0 ? _defaultValue : getMarkPropOrConfig(channel, markDef, config, { + defaultValue ?? (defaultValue = getMarkPropOrConfig(channel, markDef, config, { vgChannel, ignoreVgConfig: true - }); + })); if (defaultValue !== undefined) { defaultRef = signalOrValueRef(defaultValue); } } const channelDef = encoding[channel]; - return wrapCondition(model, channelDef, vgChannel !== null && vgChannel !== void 0 ? vgChannel : channel, cDef => { + return wrapCondition(model, channelDef, vgChannel ?? channel, cDef => { return midPoint({ channel, channelDef: cDef, markDef, config, @@ -9634,32 +10864,30 @@ } function color(model, opt = { filled: undefined }) { - var _opt$filled, _ref, _getMarkPropOrConfig, _getMarkPropOrConfig2; - const { markDef, encoding, config } = model; const { type: markType } = markDef; // Allow filled to be overridden (for trail's "filled") - const filled = (_opt$filled = opt.filled) !== null && _opt$filled !== void 0 ? _opt$filled : getMarkPropOrConfig('filled', markDef, config); + const filled = opt.filled ?? getMarkPropOrConfig('filled', markDef, config); const transparentIfNeeded = contains(['bar', 'point', 'circle', 'square', 'geoshape'], markType) ? 'transparent' : undefined; - const defaultFill = (_ref = (_getMarkPropOrConfig = getMarkPropOrConfig(filled === true ? 'color' : undefined, markDef, config, { + const defaultFill = getMarkPropOrConfig(filled === true ? 'color' : undefined, markDef, config, { vgChannel: 'fill' - })) !== null && _getMarkPropOrConfig !== void 0 ? _getMarkPropOrConfig : // need to add this manually as getMarkConfig normally drops config.mark[channel] if vgChannel is specified - config.mark[filled === true && 'color']) !== null && _ref !== void 0 ? _ref : // If there is no fill, always fill symbols, bar, geoshape + }) ?? // need to add this manually as getMarkConfig normally drops config.mark[channel] if vgChannel is specified + config.mark[filled === true && 'color'] ?? // If there is no fill, always fill symbols, bar, geoshape // with transparent fills https://github.com/vega/vega-lite/issues/1316 transparentIfNeeded; - const defaultStroke = (_getMarkPropOrConfig2 = getMarkPropOrConfig(filled === false ? 'color' : undefined, markDef, config, { + const defaultStroke = getMarkPropOrConfig(filled === false ? 'color' : undefined, markDef, config, { vgChannel: 'stroke' - })) !== null && _getMarkPropOrConfig2 !== void 0 ? _getMarkPropOrConfig2 : // need to add this manually as getMarkConfig normally drops config.mark[channel] if vgChannel is specified + }) ?? // need to add this manually as getMarkConfig normally drops config.mark[channel] if vgChannel is specified config.mark[filled === false && 'color']; const colorVgChannel = filled ? 'fill' : 'stroke'; const fillStrokeMarkDefAndConfig = { ...(defaultFill ? { fill: signalOrValueRef(defaultFill) } : {}), @@ -9724,12 +10952,11 @@ * Return encode for point (non-band) position channels. 
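   *
   * For illustration only (not from the original source): unit mark compilers
   * invoke this helper roughly as
   *
   *   pointPosition('x', model, {defaultPos: 'mid'})
   *
   * so a channel without an encoding still falls back to a centered, zero, or
   * max reference as requested by defaultPos.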
*/ function pointPosition(channel, model, { defaultPos, - vgChannel, - isMidPoint + vgChannel }) { const { encoding, markDef, config, @@ -9755,11 +10982,10 @@ channel, channelDef, channel2Def, markDef, config, - isMidPoint, scaleName, scale, stack, offset, defaultRef @@ -9776,36 +11002,33 @@ function positionRef(params) { const { channel, channelDef, - isMidPoint, scaleName, stack, offset, - markDef, - config + markDef } = params; // This isn't a part of midPoint because we use midPoint for non-position too if (isFieldOrDatumDef(channelDef) && stack && channel === stack.fieldChannel) { if (isFieldDef(channelDef)) { - const band = getBand({ - channel, - fieldDef: channelDef, - isMidPoint, - markDef, - stack, - config - }); + let bandPosition = channelDef.bandPosition; - if (band !== undefined) { + if (bandPosition === undefined && markDef.type === 'text' && (channel === 'radius' || channel === 'theta')) { + // theta and radius of text mark should use bandPosition = 0.5 by default + // so that labels for arc marks are centered automatically + bandPosition = 0.5; + } + + if (bandPosition !== undefined) { return interpolatedSignalRef({ scaleName, fieldOrDatumDef: channelDef, // positionRef always have type startSuffix: 'start', - band, + bandPosition, offset }); } } // x or y use stack_end so that stacked line's point mark use stack_end too. @@ -9869,11 +11092,11 @@ // zeroOrMax switch (mainChannel) { case 'radius': // max of radius is min(width, height) / 2 return { - signal: "min(".concat(model.width.signal, ",").concat(model.height.signal, ")/2") + signal: `min(${model.width.signal},${model.height.signal})/2` }; case 'theta': return { signal: '2*PI' @@ -9895,11 +11118,11 @@ break; case 'mid': { - const sizeRef = model[getSizeChannel(channel)]; + const sizeRef = model[getSizeChannel$1(channel)]; return { ...sizeRef, mult: 0.5 }; } } // defaultPos === null @@ -9941,11 +11164,12 @@ return BASELINED_Y_CHANNEL[alignExcludingSignal || defaultAlign]; } } /** - * Utility for area/rule position, which can be either point or range. (One of the axes should be point and the other should be range.) + * Utility for area/rule position, which can be either point or range. + * (One of the axes should be point and the other should be range.) */ function pointOrRangePosition(channel, model, { defaultPos, defaultPos2, @@ -9969,11 +11193,11 @@ const { markDef, config } = model; const channel2 = getSecondaryRangeChannel(channel); - const sizeChannel = getSizeChannel(channel); + const sizeChannel = getSizeChannel$1(channel); const pos2Mixins = pointPosition2OrSize(model, defaultPos2, channel2); const vgChannel = pos2Mixins[sizeChannel] ? // If there is width/height, we need to position the marks based on the alignment. vgAlignedPositionChannel(channel, markDef, config) : // Otherwise, make sure to apply to the right Vg Channel (for arc mark) getVgPositionChannel(channel); return { ...pointPosition(channel, model, { @@ -9995,24 +11219,34 @@ markDef, stack, config } = model; const baseChannel = getMainRangeChannel(channel); - const sizeChannel = getSizeChannel(channel); + const sizeChannel = getSizeChannel$1(channel); const vgChannel = getVgPositionChannel(channel); const channelDef = encoding[baseChannel]; const scaleName = model.scaleName(baseChannel); const scale = model.getScaleComponent(baseChannel); const offset = channel in encoding || channel in markDef ? 
getOffset(channel, model.markDef) : getOffset(baseChannel, model.markDef); if (!channelDef && (channel === 'x2' || channel === 'y2') && (encoding.latitude || encoding.longitude)) { - // use geopoint output if there are lat2/long2 and there is no point position2 overriding lat2/long2. - return { - [vgChannel]: { - field: model.getName(channel) - } - }; + const vgSizeChannel = getSizeChannel$1(channel); + const size = model.markDef[vgSizeChannel]; + + if (size != null) { + return { + [vgSizeChannel]: { + value: size + } + }; + } else { + return { + [vgChannel]: { + field: model.getName(channel) + } + }; + } } const valueRef = position2Ref({ channel, channelDef, @@ -10082,11 +11316,11 @@ defaultRef }); } function position2orSize(channel, markDef) { - const sizeChannel = getSizeChannel(channel); + const sizeChannel = getSizeChannel$1(channel); const vgChannel = getVgPositionChannel(channel); if (markDef[vgChannel] !== undefined) { return { [vgChannel]: widthHeightValueOrSignalRef(channel, markDef[vgChannel]) @@ -10094,180 +11328,180 @@ } else if (markDef[channel] !== undefined) { return { [vgChannel]: widthHeightValueOrSignalRef(channel, markDef[channel]) }; } else if (markDef[sizeChannel]) { - return { - [sizeChannel]: widthHeightValueOrSignalRef(channel, markDef[sizeChannel]) - }; + const dimensionSize = markDef[sizeChannel]; + + if (isRelativeBandSize(dimensionSize)) { + warn(relativeBandSizeNotSupported(sizeChannel)); + } else { + return { + [sizeChannel]: widthHeightValueOrSignalRef(channel, dimensionSize) + }; + } } return undefined; } function rectPosition(model, channel, mark) { - var _ref, _encoding$sizeChannel; - const { config, encoding, - markDef, - stack + markDef } = model; const channel2 = getSecondaryRangeChannel(channel); - const sizeChannel = getSizeChannel(channel); + const sizeChannel = getSizeChannel$1(channel); const channelDef = encoding[channel]; const channelDef2 = encoding[channel2]; const scale = model.getScaleComponent(channel); const scaleType = scale ? scale.get('type') : undefined; const scaleName = model.scaleName(channel); const orient = markDef.orient; - const hasSizeDef = (_ref = (_encoding$sizeChannel = encoding[sizeChannel]) !== null && _encoding$sizeChannel !== void 0 ? _encoding$sizeChannel : encoding.size) !== null && _ref !== void 0 ? _ref : getMarkPropOrConfig('size', markDef, config, { + const hasSizeDef = encoding[sizeChannel] ?? encoding.size ?? getMarkPropOrConfig('size', markDef, config, { vgChannel: sizeChannel }); const isBarBand = mark === 'bar' && (channel === 'x' ? orient === 'vertical' : orient === 'horizontal'); // x, x2, and width -- we must specify two of these in all conditions - if (isFieldDef(channelDef) && (isBinning(channelDef.bin) || isBinned(channelDef.bin) || channelDef.timeUnit && !channelDef2) && !hasSizeDef && !hasDiscreteDomain(scaleType)) { - var _model$component$axes, _axis$get; + if (isFieldDef(channelDef) && (isBinning(channelDef.bin) || isBinned(channelDef.bin) || channelDef.timeUnit && !channelDef2) && !(hasSizeDef && !isRelativeBandSize(hasSizeDef)) && !hasDiscreteDomain(scaleType)) { + var _model$component$axes; - const band = getBand({ + const bandSize = getBandSize({ channel, fieldDef: channelDef, - stack, markDef, - config + config, + scaleType }); const axis = (_model$component$axes = model.component.axes[channel]) === null || _model$component$axes === void 0 ? void 0 : _model$component$axes[0]; - const axisTranslate = (_axis$get = axis === null || axis === void 0 ? 
void 0 : axis.get('translate')) !== null && _axis$get !== void 0 ? _axis$get : 0.5; // vega default is 0.5 + const axisTranslate = (axis === null || axis === void 0 ? void 0 : axis.get('translate')) ?? 0.5; // vega default is 0.5 return rectBinPosition({ fieldDef: channelDef, fieldDef2: channelDef2, channel, markDef, scaleName, - band, + bandSize, axisTranslate, spacing: isXorY(channel) ? getMarkPropOrConfig('binSpacing', markDef, config) : undefined, reverse: scale.get('reverse'), config }); } else if ((isFieldOrDatumDef(channelDef) && hasDiscreteDomain(scaleType) || isBarBand) && !channelDef2) { - return positionAndSize(mark, channelDef, channel, model); + return positionAndSize(channelDef, channel, model); } else { return rangePosition(channel, model, { defaultPos: 'zeroOrMax', defaultPos2: 'zeroOrMin' }); } } - function defaultSizeRef(mark, sizeChannel, scaleName, scale, config, band) { - if (scale) { - const scaleType = scale.get('type'); + function defaultSizeRef(sizeChannel, scaleName, scale, config, bandSize) { + if (isRelativeBandSize(bandSize)) { + if (scale) { + const scaleType = scale.get('type'); - if (scaleType === 'point' || scaleType === 'band') { - if (config[mark].discreteBandSize !== undefined) { + if (scaleType === 'band') { return { - value: config[mark].discreteBandSize - }; - } - - if (scaleType === ScaleType.POINT) { - const scaleRange = scale.get('range'); - - if (isVgRangeStep(scaleRange) && isNumber(scaleRange.step)) { - return { - value: scaleRange.step - 2 - }; - } - - return { - value: DEFAULT_STEP - 2 - }; - } else { - // BAND - return { scale: scaleName, - band + band: bandSize.band }; + } else if (bandSize.band !== 1) { + warn(cannotUseRelativeBandSizeWithNonBandScale(scaleType)); + bandSize = undefined; } } else { - // continuous scale return { - value: config[mark].continuousBandSize + mult: bandSize.band, + field: { + group: sizeChannel + } }; } - } // No Scale + } else if (isSignalRef(bandSize)) { + return bandSize; + } else if (bandSize) { + return { + value: bandSize + }; + } // no valid band size - const step = getViewConfigDiscreteStep(config.view, sizeChannel); - const value = getFirstDefined( // No scale is like discrete bar (with one item) - config[mark].discreteBandSize, step - 2); - return value !== undefined ? { - value - } : undefined; + if (scale) { + const scaleRange = scale.get('range'); + + if (isVgRangeStep(scaleRange) && vegaUtil.isNumber(scaleRange.step)) { + return { + value: scaleRange.step - 2 + }; + } + } + + const defaultStep = getViewConfigDiscreteStep(config.view, sizeChannel); + return { + value: defaultStep - 2 + }; } /** * Output position encoding and its size encoding for continuous, point, and band scales. */ - function positionAndSize(mark, fieldDef, channel, model) { - var _ref2; - + function positionAndSize(fieldDef, channel, model) { const { markDef, encoding, config, stack } = model; const orient = markDef.orient; const scaleName = model.scaleName(channel); const scale = model.getScaleComponent(channel); - const vgSizeChannel = getSizeChannel(channel); + const vgSizeChannel = getSizeChannel$1(channel); const channel2 = getSecondaryRangeChannel(channel); // use "size" channel for bars, if there is orient and the channel matches the right orientation - const useVlSizeChannel = orient === 'horizontal' && channel === 'y' || orient === 'vertical' && channel === 'x'; - const sizeFromMarkOrConfig = getMarkPropOrConfig(useVlSizeChannel ? 
'size' : vgSizeChannel, markDef, config, { - vgChannel: vgSizeChannel - }); // Use size encoding / mark property / config if it exists + const useVlSizeChannel = orient === 'horizontal' && channel === 'y' || orient === 'vertical' && channel === 'x'; // Use size encoding / mark property / config if it exists let sizeMixins; - if (encoding.size || sizeFromMarkOrConfig !== undefined) { + if (encoding.size || markDef.size) { if (useVlSizeChannel) { sizeMixins = nonPosition('size', model, { vgChannel: vgSizeChannel, - defaultValue: sizeFromMarkOrConfig + defaultRef: signalOrValueRef(markDef.size) }); } else { warn(cannotApplySizeToNonOrientedMark(markDef.type)); } } // Otherwise, apply default value - const band = (_ref2 = isFieldOrDatumDef(fieldDef) ? getBand({ + const bandSize = getBandSize({ channel, fieldDef, markDef, - stack, - config - }) : undefined) !== null && _ref2 !== void 0 ? _ref2 : 1; + config, + scaleType: scale === null || scale === void 0 ? void 0 : scale.get('type'), + useVlSizeChannel + }); sizeMixins = sizeMixins || { - [vgSizeChannel]: defaultSizeRef(mark, vgSizeChannel, scaleName, scale, config, band) + [vgSizeChannel]: defaultSizeRef(vgSizeChannel, scaleName, scale, config, bandSize) }; /* Band scales with size value and all point scales, use xc/yc + band=0.5 Otherwise (band scales that has size based on a band ref), use x/y with position band = (1 - size_band) / 2. In this case, size_band is the band specified in the x/y-encoding. By default band is 1, so `(1 - band) / 2` = 0. If band is 0.6, the the x/y position in such case should be `(1 - band) / 2` = 0.2 */ - const center = (scale === null || scale === void 0 ? void 0 : scale.get('type')) !== 'band' || !('band' in sizeMixins[vgSizeChannel]); - const vgChannel = vgAlignedPositionChannel(channel, markDef, config, center ? 'middle' : 'top'); + const defaultBandAlign = (scale === null || scale === void 0 ? void 0 : scale.get('type')) !== 'band' || !('band' in sizeMixins[vgSizeChannel]) ? 'middle' : 'top'; + const vgChannel = vgAlignedPositionChannel(channel, markDef, config, defaultBandAlign); + const center = vgChannel === 'xc' || vgChannel === 'yc'; const offset = getOffset(channel, markDef); const posRef = midPointRefWithPositionInvalidTest({ channel, channelDef: fieldDef, markDef, @@ -10281,11 +11515,13 @@ defaultPos: 'mid', channel, scaleName, scale }), - band: center ? 0.5 : (1 - band) / 2 + bandPosition: center ? 0.5 : isSignalRef(bandSize) ? { + signal: `(1-${bandSize})/2` + } : isRelativeBandSize(bandSize) ? (1 - bandSize.band) / 2 : 0 }); if (vgSizeChannel) { return { [vgChannel]: posRef, @@ -10300,11 +11536,11 @@ offset } : sizeRef; return { [vgChannel]: posRef, // posRef might be an array that wraps position invalid test - [vgChannel2]: isArray(posRef) ? [posRef[0], { ...posRef[1], + [vgChannel2]: vegaUtil.isArray(posRef) ? [posRef[0], { ...posRef[1], offset: sizeOffset }] : { ...posRef, offset: sizeOffset } }; @@ -10320,13 +11556,13 @@ if (isSignalRef(reverse) || isSignalRef(offset) || isSignalRef(translate)) { const reverseExpr = signalOrStringValue(reverse); const offsetExpr = signalOrStringValue(offset); const translateExpr = signalOrStringValue(translate); - const t = translateExpr ? "".concat(translateExpr, " + ") : ''; - const r = reverseExpr ? "(".concat(reverseExpr, " ? -1 : 1) * ") : ''; - const o = offsetExpr ? "(".concat(offsetExpr, " + ").concat(spacingOffset, ")") : spacingOffset; + const t = translateExpr ? `${translateExpr} + ` : ''; + const r = reverseExpr ? `(${reverseExpr} ? 
-1 : 1) * ` : ''; + const o = offsetExpr ? `(${offsetExpr} + ${spacingOffset})` : spacingOffset; return { signal: t + r + o }; } else { offset = offset || 0; @@ -10336,11 +11572,11 @@ function rectBinPosition({ fieldDef, fieldDef2, channel, - band, + bandSize, scaleName, markDef, spacing = 0, axisTranslate, reverse, @@ -10348,28 +11584,33 @@ }) { const channel2 = getSecondaryRangeChannel(channel); const vgChannel = getVgPositionChannel(channel); const vgChannel2 = getVgPositionChannel(channel2); const offset = getOffset(channel, markDef); + const bandPosition = isSignalRef(bandSize) ? { + signal: `(1-${bandSize.signal})/2` + } : isRelativeBandSize(bandSize) ? (1 - bandSize.band) / 2 : 0.5; if (isBinning(fieldDef.bin) || fieldDef.timeUnit) { return { [vgChannel2]: rectBinRef({ channel, fieldDef, scaleName, markDef, - band: (1 - band) / 2, + bandPosition, offset: getBinSpacing(channel2, spacing, reverse, axisTranslate, offset), config }), [vgChannel]: rectBinRef({ channel, fieldDef, scaleName, markDef, - band: 1 - (1 - band) / 2, + bandPosition: isSignalRef(bandPosition) ? { + signal: `1-${bandPosition.signal}` + } : 1 - bandPosition, offset: getBinSpacing(channel, spacing, reverse, axisTranslate, offset), config }) }; } else if (isBinned(fieldDef.bin)) { @@ -10386,13 +11627,13 @@ }; } else if (isBinParams(fieldDef.bin) && fieldDef.bin.step) { return { [vgChannel2]: startRef, [vgChannel]: { - signal: "scale(\"".concat(scaleName, "\", ").concat(vgField(fieldDef, { - expr: 'datum' - }), " + ").concat(fieldDef.bin.step, ")"), + signal: `scale("${scaleName}", ${vgField(fieldDef, { + expr: 'datum' + })} + ${fieldDef.bin.step})`, offset: getBinSpacing(channel, spacing, reverse, axisTranslate, offset) } }; } } @@ -10407,18 +11648,18 @@ function rectBinRef({ channel, fieldDef, scaleName, markDef, - band, + bandPosition, offset, config }) { const r = interpolatedSignalRef({ scaleName, fieldOrDatumDef: fieldDef, - band, + bandPosition, offset }); return wrapPositionInvalidTest({ fieldDef, channel, @@ -10426,11 +11667,11 @@ ref: r, config }); } - const ALWAYS_IGNORE = new Set(['aria']); + const ALWAYS_IGNORE = new Set(['aria', 'width', 'height']); function baseEncodeEntry(model, ignore) { const { fill = undefined, stroke = undefined } = ignore.color === 'include' ? color(model) : {}; @@ -10442,11 +11683,11 @@ ...nonPosition('strokeOpacity', model), ...nonPosition('strokeWidth', model), ...nonPosition('strokeDash', model), ...zindex(model), ...tooltip(model), - ...text(model, 'href'), + ...text$1(model, 'href'), ...aria(model) }; } // TODO: mark VgValueRef[] as readonly after https://github.com/vega/vega/pull/1987 function wrapAllFieldsInvalid(model, channel, valueRef) { @@ -10458,11 +11699,11 @@ const invalid = getMarkPropOrConfig('invalid', markDef, config); if (invalid === 'hide' && valueRef && !isPathMark(mark)) { // For non-path marks, we have to exclude invalid values (null and NaN) for scales with continuous domains. // For path marks, we will use "defined" property and skip these values instead. - const test = allFieldsInvalidPredicate(model, { + const test = allFieldsInvalidPredicate$1(model, { invalid: true, channels: SCALE_CHANNELS }); if (test) { @@ -10470,11 +11711,11 @@ [channel]: [// prepend the invalid case // TODO: support custom value { test, value: null - }, ...array(valueRef)] + }, ...vegaUtil.array(valueRef)] }; } } return valueRef ? 
{ @@ -10490,11 +11731,11 @@ return m; }, {}); } - function allFieldsInvalidPredicate(model, { + function allFieldsInvalidPredicate$1(model, { invalid = false, channels }) { const filterIndex = channels.reduce((aggregator, channel) => { const scaleComponent = model.getScaleComponent(channel); @@ -10514,11 +11755,11 @@ }, {}); const fields = keys(filterIndex); if (fields.length > 0) { const op = invalid ? '||' : '&&'; - return fields.map(field => fieldInvalidPredicate(field, invalid)).join(" ".concat(op, " ")); + return fields.map(field => fieldInvalidPredicate(field, invalid)).join(` ${op} `); } return undefined; } @@ -10528,11 +11769,11 @@ markDef } = model; const invalid = getMarkPropOrConfig('invalid', markDef, config); if (invalid) { - const signal = allFieldsInvalidPredicate$1(model, { + const signal = allFieldsInvalidPredicate(model, { channels: POSITION_SCALE_CHANNELS }); if (signal) { return { @@ -10544,11 +11785,11 @@ } return {}; } - function allFieldsInvalidPredicate$1(model, { + function allFieldsInvalidPredicate(model, { invalid = false, channels }) { const filterIndex = channels.reduce((aggregator, channel) => { const scaleComponent = model.getScaleComponent(channel); @@ -10568,11 +11809,11 @@ }, {}); const fields = keys(filterIndex); if (fields.length > 0) { const op = invalid ? '||' : '&&'; - return fields.map(field => fieldInvalidPredicate(field, invalid)).join(" ".concat(op, " ")); + return fields.map(field => fieldInvalidPredicate(field, invalid)).join(` ${op} `); } return undefined; } @@ -10586,12 +11827,12 @@ return undefined; } const VORONOI = 'voronoi'; const nearest = { - has: selCmpt => { - return selCmpt.type !== 'interval' && selCmpt.nearest; + defined: selCmpt => { + return selCmpt.type === 'point' && selCmpt.nearest; }, parse: (model, selCmpt) => { // Scope selection events to the voronoi mark to prevent capturing // events that occur on the group mark (https://github.com/vega/vega/issues/2112). if (selCmpt.events) { @@ -10650,14 +11891,12 @@ }] }; let index = 0; let exists = false; marks.forEach((mark, i) => { - var _mark$name; + const name = mark.name ?? ''; - const name = (_mark$name = mark.name) !== null && _mark$name !== void 0 ? _mark$name : ''; - if (name === model.component.mark[0].name) { index = i; } else if (name.indexOf(VORONOI) >= 0) { exists = true; } @@ -10669,605 +11908,66 @@ return marks; } }; - /** - * A node in the dataflow tree. - */ - - class DataFlowNode { - constructor(parent, debugName) { - this.debugName = debugName; - - _defineProperty(this, "_children", []); - - _defineProperty(this, "_parent", null); - - _defineProperty(this, "_hash", void 0); - - if (parent) { - this.parent = parent; - } - } - /** - * Clone this node with a deep copy but don't clone links to children or parents. - */ - - - clone() { - throw new Error('Cannot clone node'); - } - /** - * Return a hash of the node. - */ - - - get parent() { - return this._parent; - } - /** - * Set the parent of the node and also add this node to the parent's children. 
- */ - - - set parent(parent) { - this._parent = parent; - - if (parent) { - parent.addChild(this); - } - } - - get children() { - return this._children; - } - - numChildren() { - return this._children.length; - } - - addChild(child, loc) { - // do not add the same child twice - if (this._children.indexOf(child) > -1) { - warn(ADD_SAME_CHILD_TWICE); - return; - } - - if (loc !== undefined) { - this._children.splice(loc, 0, child); - } else { - this._children.push(child); - } - } - - removeChild(oldChild) { - const loc = this._children.indexOf(oldChild); - - this._children.splice(loc, 1); - - return loc; - } - /** - * Remove node from the dataflow. - */ - - - remove() { - let loc = this._parent.removeChild(this); - - for (const child of this._children) { - // do not use the set method because we want to insert at a particular location - child._parent = this._parent; - - this._parent.addChild(child, loc++); - } - } - /** - * Insert another node as a parent of this node. - */ - - - insertAsParentOf(other) { - const parent = other.parent; - parent.removeChild(this); - this.parent = parent; - other.parent = this; - } - - swapWithParent() { - const parent = this._parent; - const newParent = parent.parent; // reconnect the children - - for (const child of this._children) { - child.parent = parent; - } // remove old links - - - this._children = []; // equivalent to removing every child link one by one - - parent.removeChild(this); - parent.parent.removeChild(parent); // swap two nodes - - this.parent = newParent; - parent.parent = this; - } - - } - class OutputNode extends DataFlowNode { - clone() { - const cloneObj = new this.constructor(); - cloneObj.debugName = 'clone_' + this.debugName; - cloneObj._source = this._source; - cloneObj._name = 'clone_' + this._name; - cloneObj.type = this.type; - cloneObj.refCounts = this.refCounts; - cloneObj.refCounts[cloneObj._name] = 0; - return cloneObj; - } - /** - * @param source The name of the source. Will change in assemble. - * @param type The type of the output node. - * @param refCounts A global ref counter map. - */ - - - constructor(parent, source, type, refCounts) { - super(parent, source); - this.type = type; - this.refCounts = refCounts; - - _defineProperty(this, "_source", void 0); - - _defineProperty(this, "_name", void 0); - - this._source = this._name = source; - - if (this.refCounts && !(this._name in this.refCounts)) { - this.refCounts[this._name] = 0; - } - } - - dependentFields() { - return new Set(); - } - - producedFields() { - return new Set(); - } - - hash() { - if (this._hash === undefined) { - this._hash = "Output ".concat(uniqueId()); - } - - return this._hash; - } - /** - * Request the datasource name and increase the ref counter. - * - * During the parsing phase, this will return the simple name such as 'main' or 'raw'. - * It is crucial to request the name from an output node to mark it as a required node. - * If nobody ever requests the name, this datasource will not be instantiated in the assemble phase. - * - * In the assemble phase, this will return the correct name. 
- */ - - - getSource() { - this.refCounts[this._name]++; - return this._source; - } - - isRequired() { - return !!this.refCounts[this._name]; - } - - setSource(source) { - this._source = source; - } - - } - - class TimeUnitNode extends DataFlowNode { - clone() { - return new TimeUnitNode(null, duplicate(this.formula)); - } - - constructor(parent, formula) { - super(parent); - this.formula = formula; - } - - static makeFromEncoding(parent, model) { - const formula = model.reduceFieldDef((timeUnitComponent, fieldDef, channel) => { - const { - field, - timeUnit - } = fieldDef; - const channelDef2 = isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined; - const band = isUnitModel(model) && hasBand(channel, fieldDef, channelDef2, model.stack, model.markDef, model.config); - - if (timeUnit) { - const as = vgField(fieldDef, { - forAs: true - }); - timeUnitComponent[hash({ - as, - field, - timeUnit - })] = { - as, - field, - timeUnit, - ...(band ? { - band: true - } : {}) - }; - } - - return timeUnitComponent; - }, {}); - - if (isEmpty(formula)) { - return null; - } - - return new TimeUnitNode(parent, formula); - } - - static makeFromTransform(parent, t) { - const { - timeUnit, - ...other - } = { ...t - }; - const normalizedTimeUnit = normalizeTimeUnit(timeUnit); - const component = { ...other, - timeUnit: normalizedTimeUnit - }; - return new TimeUnitNode(parent, { - [hash(component)]: component - }); - } - /** - * Merge together TimeUnitNodes assigning the children of `other` to `this` - * and removing `other`. - */ - - - merge(other) { - this.formula = { ...this.formula - }; // if the same hash happen twice, merge "band" - - for (const key in other.formula) { - if (!this.formula[key] || other.formula[key].band) { - // copy if it's not a duplicate or if we need to copy band over - this.formula[key] = other.formula[key]; - } - } - - for (const child of other.children) { - other.removeChild(child); - child.parent = this; - } - - other.remove(); - } - /** - * Remove time units coming from the other node. - */ - - - removeFormulas(fields) { - const newFormula = {}; - - for (const [key, timeUnit] of entries(this.formula)) { - if (!fields.has(timeUnit.as)) { - newFormula[key] = timeUnit; - } - } - - this.formula = newFormula; - } - - producedFields() { - return new Set(vals(this.formula).map(f => f.as)); - } - - dependentFields() { - return new Set(vals(this.formula).map(f => f.field)); - } - - hash() { - return "TimeUnit ".concat(hash(this.formula)); - } - - assemble() { - const transforms = []; - - for (const f of vals(this.formula)) { - const { - field, - as, - timeUnit - } = f; - const { - unit, - utc, - ...params - } = normalizeTimeUnit(timeUnit); - transforms.push({ - field: replacePathInField(field), - type: 'timeunit', - ...(unit ? { - units: getTimeUnitParts(unit) - } : {}), - ...(utc ? { - timezone: 'utc' - } : {}), - ...params, - as: [as, "".concat(as, "_end")] - }); - } - - return transforms; - } - - } - - const TUPLE_FIELDS = '_tuple_fields'; - /** - * Whether the selection tuples hold enumerated or ranged values for a field. 
- */ - - class SelectionProjectionComponent { - constructor(...items) { - _defineProperty(this, "hasChannel", void 0); - - _defineProperty(this, "hasField", void 0); - - _defineProperty(this, "timeUnit", void 0); - - _defineProperty(this, "items", void 0); - - this.items = items; - this.hasChannel = {}; - this.hasField = {}; - } - - } - const project = { - has: () => { - return true; // This transform handles its own defaults, so always run parse. - }, - parse: (model, selCmpt, selDef) => { - var _selCmpt$project; - - const name = selCmpt.name; - const proj = (_selCmpt$project = selCmpt.project) !== null && _selCmpt$project !== void 0 ? _selCmpt$project : selCmpt.project = new SelectionProjectionComponent(); - const parsed = {}; - const timeUnits = {}; - const signals = new Set(); - - const signalName = (p, range) => { - const suffix = range === 'visual' ? p.channel : p.field; - let sg = varName("".concat(name, "_").concat(suffix)); - - for (let counter = 1; signals.has(sg); counter++) { - sg = varName("".concat(name, "_").concat(suffix, "_").concat(counter)); - } - - signals.add(sg); - return { - [range]: sg - }; - }; // If no explicit projection (either fields or encodings) is specified, set some defaults. - // If an initial value is set, try to infer projections. - // Otherwise, use the default configuration. - - - if (!selDef.fields && !selDef.encodings) { - const cfg = model.config.selection[selDef.type]; - - if (selDef.init) { - for (const init of array(selDef.init)) { - for (const key of keys(init)) { - if (isSingleDefUnitChannel(key)) { - (selDef.encodings || (selDef.encodings = [])).push(key); - } else { - if (selDef.type === 'interval') { - warn(INTERVAL_INITIALIZED_WITH_X_Y); - selDef.encodings = cfg.encodings; - } else { - (selDef.fields || (selDef.fields = [])).push(key); - } - } - } - } - } else { - selDef.encodings = cfg.encodings; - selDef.fields = cfg.fields; - } - } // TODO: find a possible channel mapping for these fields. - - - for (const field of (_selDef$fields = selDef.fields) !== null && _selDef$fields !== void 0 ? _selDef$fields : []) { - var _selDef$fields; - - const p = { - type: 'E', - field - }; - p.signals = { ...signalName(p, 'data') - }; - proj.items.push(p); - proj.hasField[field] = p; - } - - for (const channel of (_selDef$encodings = selDef.encodings) !== null && _selDef$encodings !== void 0 ? _selDef$encodings : []) { - var _selDef$encodings; - - const fieldDef = model.fieldDef(channel); - - if (fieldDef) { - let field = fieldDef.field; - - if (fieldDef.aggregate) { - warn(cannotProjectAggregate(channel, fieldDef.aggregate)); - continue; - } else if (!field) { - warn(cannotProjectOnChannelWithoutField(channel)); - continue; - } - - if (fieldDef.timeUnit) { - field = model.vgField(channel); // Construct TimeUnitComponents which will be combined into a - // TimeUnitNode. This node may need to be inserted into the - // dataflow if the selection is used across views that do not - // have these time units defined. - - const component = { - timeUnit: fieldDef.timeUnit, - as: field, - field: fieldDef.field - }; - timeUnits[hash(component)] = component; - } // Prevent duplicate projections on the same field. - // TODO: what if the same field is bound to multiple channels (e.g., SPLOM diag). - - - if (!parsed[field]) { - // Determine whether the tuple will store enumerated or ranged values. - // Interval selections store ranges for continuous scales, and enumerations otherwise. 
- // Single/multi selections store ranges for binned fields, and enumerations otherwise. - let type = 'E'; - - if (selCmpt.type === 'interval') { - const scaleType = model.getScaleComponent(channel).get('type'); - - if (hasContinuousDomain(scaleType)) { - type = 'R'; - } - } else if (fieldDef.bin) { - type = 'R-RE'; - } - - const p = { - field, - channel, - type - }; - p.signals = { ...signalName(p, 'data'), - ...signalName(p, 'visual') - }; - proj.items.push(parsed[field] = p); - proj.hasField[field] = proj.hasChannel[channel] = parsed[field]; - } - } else { - warn(cannotProjectOnChannelWithoutField(channel)); - } - } - - if (selDef.init) { - const parseInit = i => { - return proj.items.map(p => i[p.channel] !== undefined ? i[p.channel] : i[p.field]); - }; - - if (selDef.type === 'interval') { - selCmpt.init = parseInit(selDef.init); - } else { - const init = array(selDef.init); - selCmpt.init = init.map(parseInit); - } - } - - if (!isEmpty(timeUnits)) { - proj.timeUnit = new TimeUnitNode(null, timeUnits); - } - }, - signals: (model, selCmpt, allSignals) => { - const name = selCmpt.name + TUPLE_FIELDS; - const hasSignal = allSignals.filter(s => s.name === name); - return hasSignal.length > 0 ? allSignals : allSignals.concat({ - name, - value: selCmpt.project.items.map(proj => { - const { - signals, - hasLegend, - ...rest - } = proj; - rest.field = replacePathInField(rest.field); - return rest; - }) - }); - } - }; - const inputBindings = { - has: selCmpt => { - return selCmpt.type === 'single' && selCmpt.resolve === 'global' && selCmpt.bind && selCmpt.bind !== 'scales' && !isLegendBinding(selCmpt.bind); + defined: selCmpt => { + return selCmpt.type === 'point' && selCmpt.resolve === 'global' && selCmpt.bind && selCmpt.bind !== 'scales' && !isLegendBinding(selCmpt.bind); }, - parse: (model, selCmpt, selDef, origDef) => { - // Binding a selection to input widgets disables default direct manipulation interaction. - // A user can choose to re-enable it by explicitly specifying triggering input events. - if (!origDef.on) delete selCmpt.events; - if (!origDef.clear) delete selCmpt.clear; - }, + parse: (model, selCmpt, selDef) => disableDirectManipulation(selCmpt, selDef), topLevelSignals: (model, selCmpt, signals) => { const name = selCmpt.name; const proj = selCmpt.project; const bind = selCmpt.bind; const init = selCmpt.init && selCmpt.init[0]; // Can only exist on single selections (one initial value). - const datum = nearest.has(selCmpt) ? '(item().isVoronoi ? datum.datum : datum)' : 'datum'; + const datum = nearest.defined(selCmpt) ? '(item().isVoronoi ? datum.datum : datum)' : 'datum'; proj.items.forEach((p, i) => { - const sgname = varName("".concat(name, "_").concat(p.field)); + const sgname = varName(`${name}_${p.field}`); const hasSignal = signals.filter(s => s.name === sgname); if (!hasSignal.length) { - var _ref, _bind$p$field; - signals.unshift({ name: sgname, ...(init ? { init: assembleInit(init[i]) } : { value: null }), on: selCmpt.events ? [{ events: selCmpt.events, - update: "datum && item().mark.marktype !== 'group' ? ".concat(datum, "[").concat($(p.field), "] : null") + update: `datum && item().mark.marktype !== 'group' ? ${datum}[${vegaUtil.stringValue(p.field)}] : null` }] : [], - bind: (_ref = (_bind$p$field = bind[p.field]) !== null && _bind$p$field !== void 0 ? _bind$p$field : bind[p.channel]) !== null && _ref !== void 0 ? _ref : bind + bind: bind[p.field] ?? bind[p.channel] ?? 
bind }); } }); return signals; }, signals: (model, selCmpt, signals) => { const name = selCmpt.name; const proj = selCmpt.project; const signal = signals.filter(s => s.name === name + TUPLE)[0]; const fields = name + TUPLE_FIELDS; - const values = proj.items.map(p => varName("".concat(name, "_").concat(p.field))); - const valid = values.map(v => "".concat(v, " !== null")).join(' && '); + const values = proj.items.map(p => varName(`${name}_${p.field}`)); + const valid = values.map(v => `${v} !== null`).join(' && '); if (values.length) { - signal.update = "".concat(valid, " ? {fields: ").concat(fields, ", values: [").concat(values.join(', '), "]} : null"); + signal.update = `${valid} ? {fields: ${fields}, values: [${values.join(', ')}]} : null`; } delete signal.value; delete signal.on; return signals; } }; const TOGGLE = '_toggle'; const toggle = { - has: selCmpt => { - return selCmpt.type === 'multi' && !!selCmpt.toggle; + defined: selCmpt => { + return selCmpt.type === 'point' && !!selCmpt.toggle; }, signals: (model, selCmpt, signals) => { return signals.concat({ name: selCmpt.name + TOGGLE, value: false, @@ -11278,27 +11978,27 @@ }); }, modifyExpr: (model, selCmpt) => { const tpl = selCmpt.name + TUPLE; const signal = selCmpt.name + TOGGLE; - return "".concat(signal, " ? null : ").concat(tpl, ", ") + (selCmpt.resolve === 'global' ? "".concat(signal, " ? null : true, ") : "".concat(signal, " ? null : {unit: ").concat(unitName(model), "}, ")) + "".concat(signal, " ? ").concat(tpl, " : null"); + return `${signal} ? null : ${tpl}, ` + (selCmpt.resolve === 'global' ? `${signal} ? null : true, ` : `${signal} ? null : {unit: ${unitName(model)}}, `) + `${signal} ? ${tpl} : null`; } }; const clear = { - has: selCmpt => { + defined: selCmpt => { return selCmpt.clear !== undefined && selCmpt.clear !== false; }, - parse: (model, selCmpt, selDef) => { - if (selDef.clear) { - selCmpt.clear = isString(selDef.clear) ? eventSelector(selDef.clear, 'scope') : selDef.clear; + parse: (model, selCmpt) => { + if (selCmpt.clear) { + selCmpt.clear = vegaUtil.isString(selCmpt.clear) ? eventSelector(selCmpt.clear, 'view') : selCmpt.clear; } }, topLevelSignals: (model, selCmpt, signals) => { - if (inputBindings.has(selCmpt)) { + if (inputBindings.defined(selCmpt)) { for (const proj of selCmpt.project.items) { - const idx = signals.findIndex(n => n.name === varName("".concat(selCmpt.name, "_").concat(proj.field))); + const idx = signals.findIndex(n => n.name === varName(`${selCmpt.name}_${proj.field}`)); if (idx !== -1) { signals[idx].on.push({ events: selCmpt.clear, update: 'null' @@ -11332,143 +12032,56 @@ } } else { let tIdx = signals.findIndex(n => n.name === selCmpt.name + TUPLE); addClear(tIdx, 'null'); - if (toggle.has(selCmpt)) { + if (toggle.defined(selCmpt)) { tIdx = signals.findIndex(n => n.name === selCmpt.name + TOGGLE); addClear(tIdx, 'false'); } } return signals; } }; - const scaleBindings = { - has: selCmpt => { - return selCmpt.type === 'interval' && selCmpt.resolve === 'global' && selCmpt.bind && selCmpt.bind === 'scales'; - }, - parse: (model, selCmpt) => { - const bound = selCmpt.scales = []; - - for (const proj of selCmpt.project.items) { - const channel = proj.channel; - - if (!isScaleChannel(channel)) { - continue; - } - - const scale = model.getScaleComponent(channel); - const scaleType = scale ? 
scale.get('type') : undefined; - - if (!scale || !hasContinuousDomain(scaleType)) { - warn(SCALE_BINDINGS_CONTINUOUS); - continue; - } - - const extent = { - selection: selCmpt.name, - field: proj.field - }; - scale.set('selectionExtent', extent, true); - bound.push(proj); - } - }, - topLevelSignals: (model, selCmpt, signals) => { - const bound = selCmpt.scales.filter(proj => signals.filter(s => s.name === proj.signals.data).length === 0); // Top-level signals are only needed for multiview displays and if this - // view's top-level signals haven't already been generated. - - if (!model.parent || isTopLevelLayer(model) || bound.length === 0) { - return signals; - } // vlSelectionResolve does not account for the behavior of bound scales in - // multiview displays. Each unit view adds a tuple to the store, but the - // state of the selection is the unit selection most recently updated. This - // state is captured by the top-level signals that we insert and "push - // outer" to from within the units. We need to reassemble this state into - // the top-level named signal, except no single selCmpt has a global view. - - - const namedSg = signals.filter(s => s.name === selCmpt.name)[0]; - let update = namedSg.update; - - if (update.indexOf(VL_SELECTION_RESOLVE) >= 0) { - namedSg.update = "{".concat(bound.map(proj => "".concat($(proj.field), ": ").concat(proj.signals.data)).join(', '), "}"); - } else { - for (const proj of bound) { - const mapping = "".concat($(proj.field), ": ").concat(proj.signals.data); - - if (update.indexOf(mapping) < 0) { - update = "".concat(update.substring(0, update.length - 1), ", ").concat(mapping, "}"); - } - } - - namedSg.update = update; - } - - return signals.concat(bound.map(proj => ({ - name: proj.signals.data - }))); - }, - signals: (model, selCmpt, signals) => { - // Nested signals need only push to top-level signals with multiview displays. - if (model.parent && !isTopLevelLayer(model)) { - for (const proj of selCmpt.scales) { - const signal = signals.filter(s => s.name === proj.signals.data)[0]; - signal.push = 'outer'; - delete signal.value; - delete signal.update; - } - } - - return signals; - } - }; - function domain$1(model, channel) { - const scale = $(model.scaleName(channel)); - return "domain(".concat(scale, ")"); - } - - function isTopLevelLayer(model) { - var _model$parent$parent; - - return model.parent && isLayerModel(model.parent) && ((_model$parent$parent = !model.parent.parent) !== null && _model$parent$parent !== void 0 ? _model$parent$parent : isTopLevelLayer(model.parent.parent)); - } - const legendBindings = { - has: selCmpt => { + defined: selCmpt => { const spec = selCmpt.resolve === 'global' && selCmpt.bind && isLegendBinding(selCmpt.bind); const projLen = selCmpt.project.items.length === 1 && selCmpt.project.items[0].field !== SELECTION_ID; if (spec && !projLen) { warn(LEGEND_BINDINGS_MUST_HAVE_PROJECTION); } return spec && projLen; }, - parse: (model, selCmpt, selDef, origDef) => { - // Binding a selection to a legend disables default direct manipulation interaction. - // A user can choose to re-enable it by explicitly specifying triggering input events. - if (!origDef.on) delete selCmpt.events; - if (!origDef.clear) delete selCmpt.clear; + parse: (model, selCmpt, selDef) => { + // Allow legend items to be toggleable by default even though direct manipulation is disabled. + const selDef_ = duplicate(selDef); + selDef_.select = vegaUtil.isString(selDef_.select) ? 
{ + type: selDef_.select, + toggle: selCmpt.toggle + } : { ...selDef_.select, + toggle: selCmpt.toggle + }; + disableDirectManipulation(selCmpt, selDef_); - if (origDef.on || origDef.clear) { + if (vega.isObject(selDef.select) && (selDef.select.on || selDef.select.clear)) { const legendFilter = 'event.item && indexof(event.item.mark.role, "legend") < 0'; for (const evt of selCmpt.events) { - var _evt$filter; + evt.filter = vegaUtil.array(evt.filter ?? []); - evt.filter = array((_evt$filter = evt.filter) !== null && _evt$filter !== void 0 ? _evt$filter : []); - - if (evt.filter.indexOf(legendFilter) < 0) { + if (!evt.filter.includes(legendFilter)) { evt.filter.push(legendFilter); } } } const evt = isLegendStreamBinding(selCmpt.bind) ? selCmpt.bind.legend : 'click'; - const stream = isString(evt) ? eventSelector(evt, 'view') : array(evt); + const stream = vegaUtil.isString(evt) ? eventSelector(evt, 'view') : vegaUtil.array(evt); selCmpt.bind = { legend: { merge: stream } }; @@ -11483,16 +12096,16 @@ return ds; }; for (const proj of selCmpt.project.items) { if (!proj.hasLegend) continue; - const prefix = "".concat(varName(proj.field), "_legend"); - const sgName = "".concat(selName, "_").concat(prefix); + const prefix = `${varName(proj.field)}_legend`; + const sgName = `${selName}_${prefix}`; const hasSignal = signals.filter(s => s.name === sgName); if (hasSignal.length === 0) { - const events = stream.merge.map(markName("".concat(prefix, "_symbols"))).concat(stream.merge.map(markName("".concat(prefix, "_labels")))).concat(stream.merge.map(markName("".concat(prefix, "_entries")))); + const events = stream.merge.map(markName(`${prefix}_symbols`)).concat(stream.merge.map(markName(`${prefix}_labels`))).concat(stream.merge.map(markName(`${prefix}_entries`))); signals.unshift({ name: sgName, ...(!selCmpt.init ? { value: null } : {}), @@ -11501,11 +12114,11 @@ events, update: 'datum.value || item().items[0].items[0].datum.value', force: true }, { events: stream.merge, - update: "!event.item || !datum ? null : ".concat(sgName), + update: `!event.item || !datum ? null : ${sgName}`, force: true }] }); } } @@ -11515,13 +12128,13 @@ signals: (model, selCmpt, signals) => { const name = selCmpt.name; const proj = selCmpt.project; const tuple = signals.find(s => s.name === name + TUPLE); const fields = name + TUPLE_FIELDS; - const values = proj.items.filter(p => p.hasLegend).map(p => varName("".concat(name, "_").concat(varName(p.field), "_legend"))); - const valid = values.map(v => "".concat(v, " !== null")).join(' && '); - const update = "".concat(valid, " ? {fields: ").concat(fields, ", values: [").concat(values.join(', '), "]} : null"); + const values = proj.items.filter(p => p.hasLegend).map(p => varName(`${name}_${varName(p.field)}_legend`)); + const valid = values.map(v => `${v} !== null`).join(' && '); + const update = `${valid} ? {fields: ${fields}, values: [${values.join(', ')}]} : null`; if (selCmpt.events && values.length > 0) { tuple.on.push({ events: values.map(signal => ({ signal @@ -11548,36 +12161,33 @@ }; function parseInteractiveLegend(model, channel, legendCmpt) { var _model$fieldDef; const field = (_model$fieldDef = model.fieldDef(channel)) === null || _model$fieldDef === void 0 ? void 0 : _model$fieldDef.field; - forEachSelection(model, selCmpt => { - var _selCmpt$project$hasF; - const proj = (_selCmpt$project$hasF = selCmpt.project.hasField[field]) !== null && _selCmpt$project$hasF !== void 0 ? 
_selCmpt$project$hasF : selCmpt.project.hasChannel[channel]; + for (const selCmpt of vals(model.component.selection ?? {})) { + const proj = selCmpt.project.hasField[field] ?? selCmpt.project.hasChannel[channel]; - if (proj && legendBindings.has(selCmpt)) { - var _legendCmpt$get; - - const legendSelections = (_legendCmpt$get = legendCmpt.get('selections')) !== null && _legendCmpt$get !== void 0 ? _legendCmpt$get : []; + if (proj && legendBindings.defined(selCmpt)) { + const legendSelections = legendCmpt.get('selections') ?? []; legendSelections.push(selCmpt.name); legendCmpt.set('selections', legendSelections, false); proj.hasLegend = true; } - }); + } } - const ANCHOR = '_translate_anchor'; - const DELTA = '_translate_delta'; + const ANCHOR$1 = '_translate_anchor'; + const DELTA$1 = '_translate_delta'; const translate = { - has: selCmpt => { + defined: selCmpt => { return selCmpt.type === 'interval' && selCmpt.translate; }, signals: (model, selCmpt, signals) => { const name = selCmpt.name; - const hasScales = scaleBindings.has(selCmpt); - const anchor = name + ANCHOR; + const hasScales = scaleBindings.defined(selCmpt); + const anchor = name + ANCHOR$1; const { x, y } = selCmpt.project.hasChannel; let events = eventSelector(selCmpt.translate, 'scope'); @@ -11589,86 +12199,86 @@ signals.push({ name: anchor, value: {}, on: [{ events: events.map(e => e.between[0]), - update: '{x: x(unit), y: y(unit)' + (x !== undefined ? ', extent_x: ' + (hasScales ? domain$1(model, X) : "slice(".concat(x.signals.visual, ")")) : '') + (y !== undefined ? ', extent_y: ' + (hasScales ? domain$1(model, Y) : "slice(".concat(y.signals.visual, ")")) : '') + '}' + update: '{x: x(unit), y: y(unit)' + (x !== undefined ? `, extent_x: ${hasScales ? domain(model, X) : `slice(${x.signals.visual})`}` : '') + (y !== undefined ? `, extent_y: ${hasScales ? domain(model, Y) : `slice(${y.signals.visual})`}` : '') + '}' }] }, { - name: name + DELTA, + name: name + DELTA$1, value: {}, on: [{ events: events, - update: "{x: ".concat(anchor, ".x - x(unit), y: ").concat(anchor, ".y - y(unit)}") + update: `{x: ${anchor}.x - x(unit), y: ${anchor}.y - y(unit)}` }] }); if (x !== undefined) { - onDelta(model, selCmpt, x, 'width', signals); + onDelta$1(model, selCmpt, x, 'width', signals); } if (y !== undefined) { - onDelta(model, selCmpt, y, 'height', signals); + onDelta$1(model, selCmpt, y, 'height', signals); } return signals; } }; - function onDelta(model, selCmpt, proj, size, signals) { - var _scaleCmpt$get; - + function onDelta$1(model, selCmpt, proj, size, signals) { const name = selCmpt.name; - const anchor = name + ANCHOR; - const delta = name + DELTA; + const anchor = name + ANCHOR$1; + const delta = name + DELTA$1; const channel = proj.channel; - const hasScales = scaleBindings.has(selCmpt); + const hasScales = scaleBindings.defined(selCmpt); const signal = signals.filter(s => s.name === proj.signals[hasScales ? 'data' : 'visual'])[0]; const sizeSg = model.getSizeSignalRef(size).signal; const scaleCmpt = model.getScaleComponent(channel); const scaleType = scaleCmpt.get('type'); - const sign = hasScales && channel === X ? '-' : ''; // Invert delta when panning x-scales. + const reversed = scaleCmpt.get('reverse'); // scale parsing sets this flag for fieldDef.sort - const extent = "".concat(anchor, ".extent_").concat(channel); - const offset = "".concat(sign).concat(delta, ".").concat(channel, " / ") + (hasScales ? "".concat(sizeSg) : "span(".concat(extent, ")")); - const panFn = !hasScales ? 'panLinear' : scaleType === 'log' ? 
'panLog' : scaleType === 'pow' ? 'panPow' : 'panLinear'; - const update = "".concat(panFn, "(").concat(extent, ", ").concat(offset) + (hasScales && scaleType === 'pow' ? ", ".concat((_scaleCmpt$get = scaleCmpt.get('exponent')) !== null && _scaleCmpt$get !== void 0 ? _scaleCmpt$get : 1) : '') + ')'; + const sign = !hasScales ? '' : channel === X ? reversed ? '' : '-' : reversed ? '-' : ''; + const extent = `${anchor}.extent_${channel}`; + const offset = `${sign}${delta}.${channel} / ${hasScales ? `${sizeSg}` : `span(${extent})`}`; + const panFn = !hasScales ? 'panLinear' : scaleType === 'log' ? 'panLog' : scaleType === 'symlog' ? 'panSymlog' : scaleType === 'pow' ? 'panPow' : 'panLinear'; + const arg = !hasScales ? '' : scaleType === 'pow' ? `, ${scaleCmpt.get('exponent') ?? 1}` : scaleType === 'symlog' ? `, ${scaleCmpt.get('constant') ?? 1}` : ''; + const update = `${panFn}(${extent}, ${offset}${arg})`; signal.on.push({ events: { signal: delta }, - update: hasScales ? update : "clampRange(".concat(update, ", 0, ").concat(sizeSg, ")") + update: hasScales ? update : `clampRange(${update}, 0, ${sizeSg})` }); } - const ANCHOR$1 = '_zoom_anchor'; - const DELTA$1 = '_zoom_delta'; + const ANCHOR = '_zoom_anchor'; + const DELTA = '_zoom_delta'; const zoom = { - has: selCmpt => { + defined: selCmpt => { return selCmpt.type === 'interval' && selCmpt.zoom; }, signals: (model, selCmpt, signals) => { const name = selCmpt.name; - const hasScales = scaleBindings.has(selCmpt); - const delta = name + DELTA$1; + const hasScales = scaleBindings.defined(selCmpt); + const delta = name + DELTA; const { x, y } = selCmpt.project.hasChannel; - const sx = $(model.scaleName(X)); - const sy = $(model.scaleName(Y)); + const sx = vegaUtil.stringValue(model.scaleName(X)); + const sy = vegaUtil.stringValue(model.scaleName(Y)); let events = eventSelector(selCmpt.zoom, 'scope'); if (!hasScales) { events = events.map(e => (e.markname = name + BRUSH, e)); } signals.push({ - name: name + ANCHOR$1, + name: name + ANCHOR, on: [{ events: events, - update: !hasScales ? "{x: x(unit), y: y(unit)}" : '{' + [sx ? "x: invert(".concat(sx, ", x(unit))") : '', sy ? "y: invert(".concat(sy, ", y(unit))") : ''].filter(expr => !!expr).join(', ') + '}' + update: !hasScales ? `{x: x(unit), y: y(unit)}` : '{' + [sx ? `x: invert(${sx}, x(unit))` : '', sy ? `y: invert(${sy}, y(unit))` : ''].filter(expr => !!expr).join(', ') + '}' }] }, { name: delta, on: [{ events: events, @@ -11676,571 +12286,56 @@ update: 'pow(1.001, event.deltaY * pow(16, event.deltaMode))' }] }); if (x !== undefined) { - onDelta$1(model, selCmpt, x, 'width', signals); + onDelta(model, selCmpt, x, 'width', signals); } if (y !== undefined) { - onDelta$1(model, selCmpt, y, 'height', signals); + onDelta(model, selCmpt, y, 'height', signals); } return signals; } }; - function onDelta$1(model, selCmpt, proj, size, signals) { - var _scaleCmpt$get; - + function onDelta(model, selCmpt, proj, size, signals) { const name = selCmpt.name; const channel = proj.channel; - const hasScales = scaleBindings.has(selCmpt); + const hasScales = scaleBindings.defined(selCmpt); const signal = signals.filter(s => s.name === proj.signals[hasScales ? 'data' : 'visual'])[0]; const sizeSg = model.getSizeSignalRef(size).signal; const scaleCmpt = model.getScaleComponent(channel); const scaleType = scaleCmpt.get('type'); - const base = hasScales ? 
domain$1(model, channel) : signal.name; - const delta = name + DELTA$1; - const anchor = "".concat(name).concat(ANCHOR$1, ".").concat(channel); - const zoomFn = !hasScales ? 'zoomLinear' : scaleType === 'log' ? 'zoomLog' : scaleType === 'pow' ? 'zoomPow' : 'zoomLinear'; - const update = "".concat(zoomFn, "(").concat(base, ", ").concat(anchor, ", ").concat(delta) + (hasScales && scaleType === 'pow' ? ", ".concat((_scaleCmpt$get = scaleCmpt.get('exponent')) !== null && _scaleCmpt$get !== void 0 ? _scaleCmpt$get : 1) : '') + ')'; + const base = hasScales ? domain(model, channel) : signal.name; + const delta = name + DELTA; + const anchor = `${name}${ANCHOR}.${channel}`; + const zoomFn = !hasScales ? 'zoomLinear' : scaleType === 'log' ? 'zoomLog' : scaleType === 'symlog' ? 'zoomSymlog' : scaleType === 'pow' ? 'zoomPow' : 'zoomLinear'; + const arg = !hasScales ? '' : scaleType === 'pow' ? `, ${scaleCmpt.get('exponent') ?? 1}` : scaleType === 'symlog' ? `, ${scaleCmpt.get('constant') ?? 1}` : ''; + const update = `${zoomFn}(${base}, ${anchor}, ${delta}${arg})`; signal.on.push({ events: { signal: delta }, - update: hasScales ? update : "clampRange(".concat(update, ", 0, ").concat(sizeSg, ")") + update: hasScales ? update : `clampRange(${update}, 0, ${sizeSg})` }); } - const compilers = [project, toggle, scaleBindings, legendBindings, translate, zoom, inputBindings, nearest, clear]; - function forEachTransform(selCmpt, cb) { - for (const t of compilers) { - if (t.has(selCmpt)) { - cb(t); - } - } - } - - function assembleInit(init, isExpr = true, wrap = identity) { - if (isArray(init)) { - const assembled = init.map(v => assembleInit(v, isExpr, wrap)); - return isExpr ? "[".concat(assembled.join(', '), "]") : assembled; - } else if (isDateTime(init)) { - if (isExpr) { - return wrap(dateTimeToExpr(init)); - } else { - return wrap(dateTimeToTimestamp(init)); - } - } - - return isExpr ? wrap(JSON.stringify(init)) : init; - } - function assembleUnitSelectionSignals(model, signals) { - forEachSelection(model, (selCmpt, selCompiler) => { - const name = selCmpt.name; - let modifyExpr = selCompiler.modifyExpr(model, selCmpt); - signals.push(...selCompiler.signals(model, selCmpt)); - forEachTransform(selCmpt, txCompiler => { - if (txCompiler.signals) { - signals = txCompiler.signals(model, selCmpt, signals); - } - - if (txCompiler.modifyExpr) { - modifyExpr = txCompiler.modifyExpr(model, selCmpt, modifyExpr); - } - }); - signals.push({ - name: name + MODIFY, - on: [{ - events: { - signal: selCmpt.name + TUPLE - }, - update: "modify(".concat($(selCmpt.name + STORE), ", ").concat(modifyExpr, ")") - }] - }); - }); - return cleanupEmptyOnArray(signals); - } - function assembleFacetSignals(model, signals) { - if (model.component.selection && keys(model.component.selection).length) { - const name = $(model.getName('cell')); - signals.unshift({ - name: 'facet', - value: {}, - on: [{ - events: eventSelector('mousemove', 'scope'), - update: "isTuple(facet) ? facet : group(".concat(name, ").datum") - }] - }); - } - - return cleanupEmptyOnArray(signals); - } - function assembleTopLevelSignals(model, signals) { - let hasSelections = false; - forEachSelection(model, (selCmpt, selCompiler) => { - const name = selCmpt.name; - const store = $(name + STORE); - const hasSg = signals.filter(s => s.name === name); - - if (hasSg.length === 0) { - const resolve = selCmpt.resolve === 'global' ? 'union' : selCmpt.resolve; - const isMulti = selCmpt.type === 'multi' ? 
', true)' : ')'; - signals.push({ - name: selCmpt.name, - update: "".concat(VL_SELECTION_RESOLVE, "(").concat(store, ", ").concat($(resolve)).concat(isMulti) - }); - } - - hasSelections = true; - - if (selCompiler.topLevelSignals) { - signals = selCompiler.topLevelSignals(model, selCmpt, signals); - } - - forEachTransform(selCmpt, txCompiler => { - if (txCompiler.topLevelSignals) { - signals = txCompiler.topLevelSignals(model, selCmpt, signals); - } - }); - }); - - if (hasSelections) { - const hasUnit = signals.filter(s => s.name === 'unit'); - - if (hasUnit.length === 0) { - signals.unshift({ - name: 'unit', - value: {}, - on: [{ - events: 'mousemove', - update: 'isTuple(group()) ? group() : unit' - }] - }); - } - } - - return cleanupEmptyOnArray(signals); - } - function assembleUnitSelectionData(model, data) { - const dataCopy = [...data]; - forEachSelection(model, selCmpt => { - const init = { - name: selCmpt.name + STORE - }; - - if (selCmpt.init) { - const fields = selCmpt.project.items.map(proj => { - const { - signals, - ...rest - } = proj; - return rest; - }); - const insert = selCmpt.init.map(i => assembleInit(i, false)); - init.values = selCmpt.type === 'interval' ? [{ - unit: unitName(model, { - escape: false - }), - fields, - values: insert - }] : insert.map(i => ({ - unit: unitName(model, { - escape: false - }), - fields, - values: i - })); - } - - const contains = dataCopy.filter(d => d.name === selCmpt.name + STORE); - - if (!contains.length) { - dataCopy.push(init); - } - }); - return dataCopy; - } - function assembleUnitSelectionMarks(model, marks) { - forEachSelection(model, (selCmpt, selCompiler) => { - marks = selCompiler.marks ? selCompiler.marks(model, selCmpt, marks) : marks; - forEachTransform(selCmpt, txCompiler => { - if (txCompiler.marks) { - marks = txCompiler.marks(model, selCmpt, marks); - } - }); - }); - return marks; - } - function assembleLayerSelectionMarks(model, marks) { - for (const child of model.children) { - if (isUnitModel(child)) { - marks = assembleUnitSelectionMarks(child, marks); - } - } - - return marks; - } - function assembleSelectionScaleDomain(model, extent) { - const name = extent.selection; - const selCmpt = model.getSelectionComponent(name, varName(name)); - return { - signal: parseSelectionBinExtent(selCmpt, extent) - }; - } - - function cleanupEmptyOnArray(signals) { - return signals.map(s => { - if (s.on && !s.on.length) delete s.on; - return s; - }); - } - - const BRUSH = '_brush'; - const SCALE_TRIGGER = '_scale_trigger'; - const interval = { - signals: (model, selCmpt) => { - const name = selCmpt.name; - const fieldsSg = name + TUPLE_FIELDS; - const hasScales = scaleBindings.has(selCmpt); - const signals = []; - const dataSignals = []; - const scaleTriggers = []; - - if (selCmpt.translate && !hasScales) { - const filterExpr = "!event.item || event.item.mark.name !== ".concat($(name + BRUSH)); - events(selCmpt, (on, evt) => { - var _evt$between$0$filter; - - const filters = array((_evt$between$0$filter = evt.between[0].filter) !== null && _evt$between$0$filter !== void 0 ? _evt$between$0$filter : evt.between[0].filter = []); - - if (filters.indexOf(filterExpr) < 0) { - filters.push(filterExpr); - } - - return on; - }); - } - - selCmpt.project.items.forEach((proj, i) => { - const channel = proj.channel; - - if (channel !== X && channel !== Y) { - warn('Interval selections only support x and y encoding channels.'); - return; - } - - const init = selCmpt.init ? 
selCmpt.init[i] : null; - const cs = channelSignals(model, selCmpt, proj, init); - const dname = proj.signals.data; - const vname = proj.signals.visual; - const scaleName = $(model.scaleName(channel)); - const scaleType = model.getScaleComponent(channel).get('type'); - const toNum = hasContinuousDomain(scaleType) ? '+' : ''; - signals.push(...cs); - dataSignals.push(dname); - scaleTriggers.push({ - scaleName: model.scaleName(channel), - expr: "(!isArray(".concat(dname, ") || ") + "(".concat(toNum, "invert(").concat(scaleName, ", ").concat(vname, ")[0] === ").concat(toNum).concat(dname, "[0] && ") + "".concat(toNum, "invert(").concat(scaleName, ", ").concat(vname, ")[1] === ").concat(toNum).concat(dname, "[1]))") - }); - }); // Proxy scale reactions to ensure that an infinite loop doesn't occur - // when an interval selection filter touches the scale. - - if (!hasScales) { - signals.push({ - name: name + SCALE_TRIGGER, - value: {}, - on: [{ - events: scaleTriggers.map(t => ({ - scale: t.scaleName - })), - update: scaleTriggers.map(t => t.expr).join(' && ') + " ? ".concat(name + SCALE_TRIGGER, " : {}") - }] - }); - } // Only add an interval to the store if it has valid data extents. Data extents - // are set to null if pixel extents are equal to account for intervals over - // ordinal/nominal domains which, when inverted, will still produce a valid datum. - - - const init = selCmpt.init; - const update = "unit: ".concat(unitName(model), ", fields: ").concat(fieldsSg, ", values"); - return signals.concat({ - name: name + TUPLE, - ...(init ? { - init: "{".concat(update, ": ").concat(assembleInit(init), "}") - } : {}), - on: [{ - events: [{ - signal: dataSignals.join(' || ') - }], - // Prevents double invocation, see https://github.com/vega/vega#1672. - update: dataSignals.join(' && ') + " ? {".concat(update, ": [").concat(dataSignals, "]} : null") - }] - }); - }, - modifyExpr: (model, selCmpt) => { - const tpl = selCmpt.name + TUPLE; - return tpl + ', ' + (selCmpt.resolve === 'global' ? 'true' : "{unit: ".concat(unitName(model), "}")); - }, - marks: (model, selCmpt, marks) => { - const name = selCmpt.name; - const { - x, - y - } = selCmpt.project.hasChannel; - const xvname = x && x.signals.visual; - const yvname = y && y.signals.visual; - const store = "data(".concat($(selCmpt.name + STORE), ")"); // Do not add a brush if we're binding to scales. - - if (scaleBindings.has(selCmpt)) { - return marks; - } - - const update = { - x: x !== undefined ? { - signal: "".concat(xvname, "[0]") - } : { - value: 0 - }, - y: y !== undefined ? { - signal: "".concat(yvname, "[0]") - } : { - value: 0 - }, - x2: x !== undefined ? { - signal: "".concat(xvname, "[1]") - } : { - field: { - group: 'width' - } - }, - y2: y !== undefined ? { - signal: "".concat(yvname, "[1]") - } : { - field: { - group: 'height' - } - } - }; // If the selection is resolved to global, only a single interval is in - // the store. Wrap brush mark's encodings with a production rule to test - // this based on the `unit` property. Hide the brush mark if it corresponds - // to a unit different from the one in the store. 
- - if (selCmpt.resolve === 'global') { - for (const key of keys(update)) { - update[key] = [{ - test: "".concat(store, ".length && ").concat(store, "[0].unit === ").concat(unitName(model)), - ...update[key] - }, { - value: 0 - }]; - } - } // Two brush marks ensure that fill colors and other aesthetic choices do - // not interefere with the core marks, but that the brushed region can still - // be interacted with (e.g., dragging it around). - - - const { - fill, - fillOpacity, - cursor, - ...stroke - } = selCmpt.mark; - const vgStroke = keys(stroke).reduce((def, k) => { - def[k] = [{ - test: [x !== undefined && "".concat(xvname, "[0] !== ").concat(xvname, "[1]"), y !== undefined && "".concat(yvname, "[0] !== ").concat(yvname, "[1]")].filter(t => t).join(' && '), - value: stroke[k] - }, { - value: null - }]; - return def; - }, {}); - return [{ - name: name + BRUSH + '_bg', - type: 'rect', - clip: true, - encode: { - enter: { - fill: { - value: fill - }, - fillOpacity: { - value: fillOpacity - } - }, - update: update - } - }, ...marks, { - name: name + BRUSH, - type: 'rect', - clip: true, - encode: { - enter: { ...(cursor ? { - cursor: { - value: cursor - } - } : {}), - fill: { - value: 'transparent' - } - }, - update: { ...update, - ...vgStroke - } - } - }]; - } - }; - /** - * Returns the visual and data signals for an interval selection. - */ - - function channelSignals(model, selCmpt, proj, init) { - const channel = proj.channel; - const vname = proj.signals.visual; - const dname = proj.signals.data; - const hasScales = scaleBindings.has(selCmpt); - const scaleName = $(model.scaleName(channel)); - const scale = model.getScaleComponent(channel); - const scaleType = scale ? scale.get('type') : undefined; - - const scaled = str => "scale(".concat(scaleName, ", ").concat(str, ")"); - - const size = model.getSizeSignalRef(channel === X ? 'width' : 'height').signal; - const coord = "".concat(channel, "(unit)"); - const on = events(selCmpt, (def, evt) => { - return [...def, { - events: evt.between[0], - update: "[".concat(coord, ", ").concat(coord, "]") - }, // Brush Start - { - events: evt, - update: "[".concat(vname, "[0], clamp(").concat(coord, ", 0, ").concat(size, ")]") - } // Brush End - ]; - }); // React to pan/zooms of continuous scales. Non-continuous scales - // (band, point) cannot be pan/zoomed and any other changes - // to their domains (e.g., filtering) should clear the brushes. - - on.push({ - events: { - signal: selCmpt.name + SCALE_TRIGGER - }, - update: hasContinuousDomain(scaleType) ? "[".concat(scaled("".concat(dname, "[0]")), ", ").concat(scaled("".concat(dname, "[1]")), "]") : "[0, 0]" - }); - return hasScales ? [{ - name: dname, - on: [] - }] : [{ - name: vname, - ...(init ? { - init: assembleInit(init, true, scaled) - } : { - value: [] - }), - on: on - }, { - name: dname, - ...(init ? { - init: assembleInit(init) - } : {}), - // Cannot be `value` as `init` may require datetime exprs. - on: [{ - events: { - signal: vname - }, - update: "".concat(vname, "[0] === ").concat(vname, "[1] ? 
null : invert(").concat(scaleName, ", ").concat(vname, ")") - }] - }]; - } - - function events(selCmpt, cb) { - return selCmpt.events.reduce((on, evt) => { - if (!evt.between) { - warn("".concat(evt, " is not an ordered event stream for interval selections.")); - return on; - } - - return cb(on, evt); - }, []); - } - - function singleOrMultiSignals(model, selCmpt) { - const name = selCmpt.name; - const fieldsSg = name + TUPLE_FIELDS; - const project = selCmpt.project; - const datum = '(item().isVoronoi ? datum.datum : datum)'; - const values = project.items.map(p => { - const fieldDef = model.fieldDef(p.channel); // Binned fields should capture extents, for a range test against the raw field. - - return fieldDef && fieldDef.bin ? "[".concat(datum, "[").concat($(model.vgField(p.channel, {})), "], ") + "".concat(datum, "[").concat($(model.vgField(p.channel, { - binSuffix: 'end' - })), "]]") : "".concat(datum, "[").concat($(p.field), "]"); - }).join(', '); // Only add a discrete selection to the store if a datum is present _and_ - // the interaction isn't occurring on a group mark. This guards against - // polluting interactive state with invalid values in faceted displays - // as the group marks are also data-driven. We force the update to account - // for constant null states but varying toggles (e.g., shift-click in - // whitespace followed by a click in whitespace; the store should only - // be cleared on the second click). - - const update = "unit: ".concat(unitName(model), ", fields: ").concat(fieldsSg, ", values"); - const events = selCmpt.events; - return [{ - name: name + TUPLE, - on: events ? [{ - events, - update: "datum && item().mark.marktype !== 'group' ? {".concat(update, ": [").concat(values, "]} : null"), - force: true - }] : [] - }]; - } - const multi = { - signals: singleOrMultiSignals, - modifyExpr: (model, selCmpt) => { - const tpl = selCmpt.name + TUPLE; - return tpl + ', ' + (selCmpt.resolve === 'global' ? 'null' : "{unit: ".concat(unitName(model), "}")); - } - }; - - const single = { - signals: singleOrMultiSignals, - modifyExpr: (model, selCmpt) => { - const tpl = selCmpt.name + TUPLE; - return tpl + ', ' + (selCmpt.resolve === 'global' ? 'true' : "{unit: ".concat(unitName(model), "}")); - } - }; - const STORE = '_store'; const TUPLE = '_tuple'; const MODIFY = '_modify'; const VL_SELECTION_RESOLVE = 'vlSelectionResolve'; - const compilers$1 = { - single, - multi, - interval - }; - function forEachSelection(model, cb) { - const selections = model.component.selection; + // Order matters for parsing and assembly. + const selectionCompilers = [point$1, interval, project, toggle, // Bindings may disable direct manipulation. + inputBindings, scaleBindings, legendBindings, clear, translate, zoom, nearest]; - if (selections) { - for (const sel of vals(selections)) { - const success = cb(sel, compilers$1[sel.type]); - if (success === true) break; - } - } - } - function getFacetModel(model) { let parent = model.parent; while (parent) { - if (isFacetModel(parent)) { - break; - } - + if (isFacetModel(parent)) break; parent = parent.parent; } return parent; } @@ -12248,33 +12343,38 @@ function unitName(model, { escape } = { escape: true }) { - let name = escape ? $(model.name) : model.name; + let name = escape ? 
vegaUtil.stringValue(model.name) : model.name; const facetModel = getFacetModel(model); if (facetModel) { const { facet } = facetModel; for (const channel of FACET_CHANNELS) { if (facet[channel]) { - name += " + '__facet_".concat(channel, "_' + (facet[").concat($(facetModel.vgField(channel)), "])"); + name += ` + '__facet_${channel}_' + (facet[${vegaUtil.stringValue(facetModel.vgField(channel))}])`; } } } return name; } function requiresSelectionId(model) { - let identifier = false; - forEachSelection(model, selCmpt => { - identifier = identifier || selCmpt.project.items.some(proj => proj.field === SELECTION_ID); - }); - return identifier; + return vals(model.component.selection ?? {}).reduce((identifier, selCmpt) => { + return identifier || selCmpt.project.items.some(proj => proj.field === SELECTION_ID); + }, false); + } // Binding a point selection to query widgets or legends disables default direct manipulation interaction. + // A user can choose to re-enable it by explicitly specifying triggering input events. + + function disableDirectManipulation(selCmpt, selDef) { + if (vega.isString(selDef.select) || !selDef.select.on) delete selCmpt.events; + if (vega.isString(selDef.select) || !selDef.select.clear) delete selCmpt.clear; + if (vega.isString(selDef.select) || !selDef.select.toggle) delete selCmpt.toggle; } const RawCode = 'RawCode'; const Literal = 'Literal'; const Property = 'Property'; @@ -12412,11 +12512,11 @@ MessageUnexpectedEOS = 'Unexpected end of input', MessageInvalidRegExp = 'Invalid regular expression', MessageUnterminatedRegExp = 'Invalid regular expression: missing /', MessageStrictOctalLiteral = 'Octal literals are not allowed in strict mode.', MessageStrictDuplicateProperty = 'Duplicate data property in object literal not allowed in strict mode'; - var ILLEGAL$1 = 'ILLEGAL', + var ILLEGAL = 'ILLEGAL', DISABLED = 'Disabled.'; // See also tools/generate-unicode-regex.py. 
var RegexNonAsciiIdentifierStart = new RegExp('[\\xAA\\xB5\\xBA\\xC0-\\xD6\\xD8-\\xF6\\xF8-\\u02C1\\u02C6-\\u02D1\\u02E0-\\u02E4\\u02EC\\u02EE\\u0370-\\u0374\\u0376\\u0377\\u037A-\\u037D\\u037F\\u0386\\u0388-\\u038A\\u038C\\u038E-\\u03A1\\u03A3-\\u03F5\\u03F7-\\u0481\\u048A-\\u052F\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u05D0-\\u05EA\\u05F0-\\u05F2\\u0620-\\u064A\\u066E\\u066F\\u0671-\\u06D3\\u06D5\\u06E5\\u06E6\\u06EE\\u06EF\\u06FA-\\u06FC\\u06FF\\u0710\\u0712-\\u072F\\u074D-\\u07A5\\u07B1\\u07CA-\\u07EA\\u07F4\\u07F5\\u07FA\\u0800-\\u0815\\u081A\\u0824\\u0828\\u0840-\\u0858\\u08A0-\\u08B2\\u0904-\\u0939\\u093D\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098C\\u098F\\u0990\\u0993-\\u09A8\\u09AA-\\u09B0\\u09B2\\u09B6-\\u09B9\\u09BD\\u09CE\\u09DC\\u09DD\\u09DF-\\u09E1\\u09F0\\u09F1\\u0A05-\\u0A0A\\u0A0F\\u0A10\\u0A13-\\u0A28\\u0A2A-\\u0A30\\u0A32\\u0A33\\u0A35\\u0A36\\u0A38\\u0A39\\u0A59-\\u0A5C\\u0A5E\\u0A72-\\u0A74\\u0A85-\\u0A8D\\u0A8F-\\u0A91\\u0A93-\\u0AA8\\u0AAA-\\u0AB0\\u0AB2\\u0AB3\\u0AB5-\\u0AB9\\u0ABD\\u0AD0\\u0AE0\\u0AE1\\u0B05-\\u0B0C\\u0B0F\\u0B10\\u0B13-\\u0B28\\u0B2A-\\u0B30\\u0B32\\u0B33\\u0B35-\\u0B39\\u0B3D\\u0B5C\\u0B5D\\u0B5F-\\u0B61\\u0B71\\u0B83\\u0B85-\\u0B8A\\u0B8E-\\u0B90\\u0B92-\\u0B95\\u0B99\\u0B9A\\u0B9C\\u0B9E\\u0B9F\\u0BA3\\u0BA4\\u0BA8-\\u0BAA\\u0BAE-\\u0BB9\\u0BD0\\u0C05-\\u0C0C\\u0C0E-\\u0C10\\u0C12-\\u0C28\\u0C2A-\\u0C39\\u0C3D\\u0C58\\u0C59\\u0C60\\u0C61\\u0C85-\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBD\\u0CDE\\u0CE0\\u0CE1\\u0CF1\\u0CF2\\u0D05-\\u0D0C\\u0D0E-\\u0D10\\u0D12-\\u0D3A\\u0D3D\\u0D4E\\u0D60\\u0D61\\u0D7A-\\u0D7F\\u0D85-\\u0D96\\u0D9A-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD\\u0DC0-\\u0DC6\\u0E01-\\u0E30\\u0E32\\u0E33\\u0E40-\\u0E46\\u0E81\\u0E82\\u0E84\\u0E87\\u0E88\\u0E8A\\u0E8D\\u0E94-\\u0E97\\u0E99-\\u0E9F\\u0EA1-\\u0EA3\\u0EA5\\u0EA7\\u0EAA\\u0EAB\\u0EAD-\\u0EB0\\u0EB2\\u0EB3\\u0EBD\\u0EC0-\\u0EC4\\u0EC6\\u0EDC-\\u0EDF\\u0F00\\u0F40-\\u0F47\\u0F49-\\u0F6C\\u0F88-\\u0F8C\\u1000-\\u102A\\u103F\\u1050-\\u1055\\u105A-\\u105D\\u1061\\u1065\\u1066\\u106E-\\u1070\\u1075-\\u1081\\u108E\\u10A0-\\u10C5\\u10C7\\u10CD\\u10D0-\\u10FA\\u10FC-\\u1248\\u124A-\\u124D\\u1250-\\u1256\\u1258\\u125A-\\u125D\\u1260-\\u1288\\u128A-\\u128D\\u1290-\\u12B0\\u12B2-\\u12B5\\u12B8-\\u12BE\\u12C0\\u12C2-\\u12C5\\u12C8-\\u12D6\\u12D8-\\u1310\\u1312-\\u1315\\u1318-\\u135A\\u1380-\\u138F\\u13A0-\\u13F4\\u1401-\\u166C\\u166F-\\u167F\\u1681-\\u169A\\u16A0-\\u16EA\\u16EE-\\u16F8\\u1700-\\u170C\\u170E-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176C\\u176E-\\u1770\\u1780-\\u17B3\\u17D7\\u17DC\\u1820-\\u1877\\u1880-\\u18A8\\u18AA\\u18B0-\\u18F5\\u1900-\\u191E\\u1950-\\u196D\\u1970-\\u1974\\u1980-\\u19AB\\u19C1-\\u19C7\\u1A00-\\u1A16\\u1A20-\\u1A54\\u1AA7\\u1B05-\\u1B33\\u1B45-\\u1B4B\\u1B83-\\u1BA0\\u1BAE\\u1BAF\\u1BBA-\\u1BE5\\u1C00-\\u1C23\\u1C4D-\\u1C4F\\u1C5A-\\u1C7D\\u1CE9-\\u1CEC\\u1CEE-\\u1CF1\\u1CF5\\u1CF6\\u1D00-\\u1DBF\\u1E00-\\u1F15\\u1F18-\\u1F1D\\u1F20-\\u1F45\\u1F48-\\u1F4D\\u1F50-\\u1F57\\u1F59\\u1F5B\\u1F5D\\u1F5F-\\u1F7D\\u1F80-\\u1FB4\\u1FB6-\\u1FBC\\u1FBE\\u1FC2-\\u1FC4\\u1FC6-\\u1FCC\\u1FD0-\\u1FD3\\u1FD6-\\u1FDB\\u1FE0-\\u1FEC\\u1FF2-\\u1FF4\\u1FF6-\\u1FFC\\u2071\\u207F\\u2090-\\u209C\\u2102\\u2107\\u210A-\\u2113\\u2115\\u2119-\\u211D\\u2124\\u2126\\u2128\\u212A-\\u212D\\u212F-\\u2139\\u213C-\\u213F\\u2145-\\u2149\\u214E\\u2160-\\u2188\\u2C00-\\u2C2E\\u2C30-\\u2C5E\\u2C60-\\u2CE4\\u2CEB-\\u2CEE\\u2CF2\\u2CF3\\u2D00-\\u2D25\\u2D27\\u2D2D\\u2D30-\\u2D67\\u2D6F\\u2D80-\\u2D96\\u2DA0-\\u2DA6\\u2DA8-\\u2DAE\\u2DB0-\\u2DB6\\u2DB8-\\u2DBE
\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u2E2F\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303C\\u3041-\\u3096\\u309D-\\u309F\\u30A1-\\u30FA\\u30FC-\\u30FF\\u3105-\\u312D\\u3131-\\u318E\\u31A0-\\u31BA\\u31F0-\\u31FF\\u3400-\\u4DB5\\u4E00-\\u9FCC\\uA000-\\uA48C\\uA4D0-\\uA4FD\\uA500-\\uA60C\\uA610-\\uA61F\\uA62A\\uA62B\\uA640-\\uA66E\\uA67F-\\uA69D\\uA6A0-\\uA6EF\\uA717-\\uA71F\\uA722-\\uA788\\uA78B-\\uA78E\\uA790-\\uA7AD\\uA7B0\\uA7B1\\uA7F7-\\uA801\\uA803-\\uA805\\uA807-\\uA80A\\uA80C-\\uA822\\uA840-\\uA873\\uA882-\\uA8B3\\uA8F2-\\uA8F7\\uA8FB\\uA90A-\\uA925\\uA930-\\uA946\\uA960-\\uA97C\\uA984-\\uA9B2\\uA9CF\\uA9E0-\\uA9E4\\uA9E6-\\uA9EF\\uA9FA-\\uA9FE\\uAA00-\\uAA28\\uAA40-\\uAA42\\uAA44-\\uAA4B\\uAA60-\\uAA76\\uAA7A\\uAA7E-\\uAAAF\\uAAB1\\uAAB5\\uAAB6\\uAAB9-\\uAABD\\uAAC0\\uAAC2\\uAADB-\\uAADD\\uAAE0-\\uAAEA\\uAAF2-\\uAAF4\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB30-\\uAB5A\\uAB5C-\\uAB5F\\uAB64\\uAB65\\uABC0-\\uABE2\\uAC00-\\uD7A3\\uD7B0-\\uD7C6\\uD7CB-\\uD7FB\\uF900-\\uFA6D\\uFA70-\\uFAD9\\uFB00-\\uFB06\\uFB13-\\uFB17\\uFB1D\\uFB1F-\\uFB28\\uFB2A-\\uFB36\\uFB38-\\uFB3C\\uFB3E\\uFB40\\uFB41\\uFB43\\uFB44\\uFB46-\\uFBB1\\uFBD3-\\uFD3D\\uFD50-\\uFD8F\\uFD92-\\uFDC7\\uFDF0-\\uFDFB\\uFE70-\\uFE74\\uFE76-\\uFEFC\\uFF21-\\uFF3A\\uFF41-\\uFF5A\\uFF66-\\uFFBE\\uFFC2-\\uFFC7\\uFFCA-\\uFFCF\\uFFD2-\\uFFD7\\uFFDA-\\uFFDC]'), // eslint-disable-next-line no-misleading-character-class RegexNonAsciiIdentifierPart = new RegExp('[\\xAA\\xB5\\xBA\\xC0-\\xD6\\xD8-\\xF6\\xF8-\\u02C1\\u02C6-\\u02D1\\u02E0-\\u02E4\\u02EC\\u02EE\\u0300-\\u0374\\u0376\\u0377\\u037A-\\u037D\\u037F\\u0386\\u0388-\\u038A\\u038C\\u038E-\\u03A1\\u03A3-\\u03F5\\u03F7-\\u0481\\u0483-\\u0487\\u048A-\\u052F\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u05D0-\\u05EA\\u05F0-\\u05F2\\u0610-\\u061A\\u0620-\\u0669\\u066E-\\u06D3\\u06D5-\\u06DC\\u06DF-\\u06E8\\u06EA-\\u06FC\\u06FF\\u0710-\\u074A\\u074D-\\u07B1\\u07C0-\\u07F5\\u07FA\\u0800-\\u082D\\u0840-\\u085B\\u08A0-\\u08B2\\u08E4-\\u0963\\u0966-\\u096F\\u0971-\\u0983\\u0985-\\u098C\\u098F\\u0990\\u0993-\\u09A8\\u09AA-\\u09B0\\u09B2\\u09B6-\\u09B9\\u09BC-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CE\\u09D7\\u09DC\\u09DD\\u09DF-\\u09E3\\u09E6-\\u09F1\\u0A01-\\u0A03\\u0A05-\\u0A0A\\u0A0F\\u0A10\\u0A13-\\u0A28\\u0A2A-\\u0A30\\u0A32\\u0A33\\u0A35\\u0A36\\u0A38\\u0A39\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A59-\\u0A5C\\u0A5E\\u0A66-\\u0A75\\u0A81-\\u0A83\\u0A85-\\u0A8D\\u0A8F-\\u0A91\\u0A93-\\u0AA8\\u0AAA-\\u0AB0\\u0AB2\\u0AB3\\u0AB5-\\u0AB9\\u0ABC-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AD0\\u0AE0-\\u0AE3\\u0AE6-\\u0AEF\\u0B01-\\u0B03\\u0B05-\\u0B0C\\u0B0F\\u0B10\\u0B13-\\u0B28\\u0B2A-\\u0B30\\u0B32\\u0B33\\u0B35-\\u0B39\\u0B3C-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B5C\\u0B5D\\u0B5F-\\u0B63\\u0B66-\\u0B6F\\u0B71\\u0B82\\u0B83\\u0B85-\\u0B8A\\u0B8E-\\u0B90\\u0B92-\\u0B95\\u0B99\\u0B9A\\u0B9C\\u0B9E\\u0B9F\\u0BA3\\u0BA4\\u0BA8-\\u0BAA\\u0BAE-\\u0BB9\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD0\\u0BD7\\u0BE6-\\u0BEF\\u0C00-\\u0C03\\u0C05-\\u0C0C\\u0C0E-\\u0C10\\u0C12-\\u0C28\\u0C2A-\\u0C39\\u0C3D-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C58\\u0C59\\u0C60-\\u0C63\\u0C66-\\u0C6F\\u0C81-\\u0C83\\u0C85-\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBC-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CDE\\u0CE0-\\u0CE3\\u0CE6-\\u0CEF\\u0CF1\\u0CF2\\u0D01-\\u0D03\\u0D05-\\u0D0C\\u0D0E-\\u0D10\\u0D12-\\
u0D3A\\u0D3D-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4E\\u0D57\\u0D60-\\u0D63\\u0D66-\\u0D6F\\u0D7A-\\u0D7F\\u0D82\\u0D83\\u0D85-\\u0D96\\u0D9A-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD\\u0DC0-\\u0DC6\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DE6-\\u0DEF\\u0DF2\\u0DF3\\u0E01-\\u0E3A\\u0E40-\\u0E4E\\u0E50-\\u0E59\\u0E81\\u0E82\\u0E84\\u0E87\\u0E88\\u0E8A\\u0E8D\\u0E94-\\u0E97\\u0E99-\\u0E9F\\u0EA1-\\u0EA3\\u0EA5\\u0EA7\\u0EAA\\u0EAB\\u0EAD-\\u0EB9\\u0EBB-\\u0EBD\\u0EC0-\\u0EC4\\u0EC6\\u0EC8-\\u0ECD\\u0ED0-\\u0ED9\\u0EDC-\\u0EDF\\u0F00\\u0F18\\u0F19\\u0F20-\\u0F29\\u0F35\\u0F37\\u0F39\\u0F3E-\\u0F47\\u0F49-\\u0F6C\\u0F71-\\u0F84\\u0F86-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u1000-\\u1049\\u1050-\\u109D\\u10A0-\\u10C5\\u10C7\\u10CD\\u10D0-\\u10FA\\u10FC-\\u1248\\u124A-\\u124D\\u1250-\\u1256\\u1258\\u125A-\\u125D\\u1260-\\u1288\\u128A-\\u128D\\u1290-\\u12B0\\u12B2-\\u12B5\\u12B8-\\u12BE\\u12C0\\u12C2-\\u12C5\\u12C8-\\u12D6\\u12D8-\\u1310\\u1312-\\u1315\\u1318-\\u135A\\u135D-\\u135F\\u1380-\\u138F\\u13A0-\\u13F4\\u1401-\\u166C\\u166F-\\u167F\\u1681-\\u169A\\u16A0-\\u16EA\\u16EE-\\u16F8\\u1700-\\u170C\\u170E-\\u1714\\u1720-\\u1734\\u1740-\\u1753\\u1760-\\u176C\\u176E-\\u1770\\u1772\\u1773\\u1780-\\u17D3\\u17D7\\u17DC\\u17DD\\u17E0-\\u17E9\\u180B-\\u180D\\u1810-\\u1819\\u1820-\\u1877\\u1880-\\u18AA\\u18B0-\\u18F5\\u1900-\\u191E\\u1920-\\u192B\\u1930-\\u193B\\u1946-\\u196D\\u1970-\\u1974\\u1980-\\u19AB\\u19B0-\\u19C9\\u19D0-\\u19D9\\u1A00-\\u1A1B\\u1A20-\\u1A5E\\u1A60-\\u1A7C\\u1A7F-\\u1A89\\u1A90-\\u1A99\\u1AA7\\u1AB0-\\u1ABD\\u1B00-\\u1B4B\\u1B50-\\u1B59\\u1B6B-\\u1B73\\u1B80-\\u1BF3\\u1C00-\\u1C37\\u1C40-\\u1C49\\u1C4D-\\u1C7D\\u1CD0-\\u1CD2\\u1CD4-\\u1CF6\\u1CF8\\u1CF9\\u1D00-\\u1DF5\\u1DFC-\\u1F15\\u1F18-\\u1F1D\\u1F20-\\u1F45\\u1F48-\\u1F4D\\u1F50-\\u1F57\\u1F59\\u1F5B\\u1F5D\\u1F5F-\\u1F7D\\u1F80-\\u1FB4\\u1FB6-\\u1FBC\\u1FBE\\u1FC2-\\u1FC4\\u1FC6-\\u1FCC\\u1FD0-\\u1FD3\\u1FD6-\\u1FDB\\u1FE0-\\u1FEC\\u1FF2-\\u1FF4\\u1FF6-\\u1FFC\\u200C\\u200D\\u203F\\u2040\\u2054\\u2071\\u207F\\u2090-\\u209C\\u20D0-\\u20DC\\u20E1\\u20E5-\\u20F0\\u2102\\u2107\\u210A-\\u2113\\u2115\\u2119-\\u211D\\u2124\\u2126\\u2128\\u212A-\\u212D\\u212F-\\u2139\\u213C-\\u213F\\u2145-\\u2149\\u214E\\u2160-\\u2188\\u2C00-\\u2C2E\\u2C30-\\u2C5E\\u2C60-\\u2CE4\\u2CEB-\\u2CF3\\u2D00-\\u2D25\\u2D27\\u2D2D\\u2D30-\\u2D67\\u2D6F\\u2D7F-\\u2D96\\u2DA0-\\u2DA6\\u2DA8-\\u2DAE\\u2DB0-\\u2DB6\\u2DB8-\\u2DBE\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u2DE0-\\u2DFF\\u2E2F\\u3005-\\u3007\\u3021-\\u302F\\u3031-\\u3035\\u3038-\\u303C\\u3041-\\u3096\\u3099\\u309A\\u309D-\\u309F\\u30A1-\\u30FA\\u30FC-\\u30FF\\u3105-\\u312D\\u3131-\\u318E\\u31A0-\\u31BA\\u31F0-\\u31FF\\u3400-\\u4DB5\\u4E00-\\u9FCC\\uA000-\\uA48C\\uA4D0-\\uA4FD\\uA500-\\uA60C\\uA610-\\uA62B\\uA640-\\uA66F\\uA674-\\uA67D\\uA67F-\\uA69D\\uA69F-\\uA6F1\\uA717-\\uA71F\\uA722-\\uA788\\uA78B-\\uA78E\\uA790-\\uA7AD\\uA7B0\\uA7B1\\uA7F7-\\uA827\\uA840-\\uA873\\uA880-\\uA8C4\\uA8D0-\\uA8D9\\uA8E0-\\uA8F7\\uA8FB\\uA900-\\uA92D\\uA930-\\uA953\\uA960-\\uA97C\\uA980-\\uA9C0\\uA9CF-\\uA9D9\\uA9E0-\\uA9FE\\uAA00-\\uAA36\\uAA40-\\uAA4D\\uAA50-\\uAA59\\uAA60-\\uAA76\\uAA7A-\\uAAC2\\uAADB-\\uAADD\\uAAE0-\\uAAEF\\uAAF2-\\uAAF6\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB30-\\uAB5A\\uAB5C-\\uAB5F\\uAB64\\uAB65\\uABC0-\\uABEA\\uABEC\\uABED\\uABF0-\\uABF9\\uAC00-\\uD7A3\\uD7B0-\\uD7C6\\uD7CB-\\uD7FB\\uF900-\\uFA6D\\uFA70-\\uFAD9\\uFB00-\\uFB06\\uFB13-\\uFB17\\uFB1D-\\uFB28\\uFB2A-\\uFB36\\uFB38-\\uFB3C\\uFB3E\\uFB40\\uFB41\\uFB43\\uFB44\\uFB46-\\uFBB1\\uFBD3-\\uFD3D\\u
FD50-\\uFD8F\\uFD92-\\uFDC7\\uFDF0-\\uFDFB\\uFE00-\\uFE0F\\uFE20-\\uFE2D\\uFE33\\uFE34\\uFE4D-\\uFE4F\\uFE70-\\uFE74\\uFE76-\\uFEFC\\uFF10-\\uFF19\\uFF21-\\uFF3A\\uFF3F\\uFF41-\\uFF5A\\uFF66-\\uFFBE\\uFFC2-\\uFFC7\\uFFCA-\\uFFCF\\uFFD2-\\uFFD7\\uFFDA-\\uFFDC]'); // Ensure the condition is true, otherwise throw an error. @@ -12539,11 +12639,11 @@ for (i = 0; i < len; ++i) { if (index < length && isHexDigit(source[index])) { ch = source[index++]; code = code * 16 + '0123456789abcdef'.indexOf(ch.toLowerCase()); } else { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } } return String.fromCharCode(code); } @@ -12552,11 +12652,11 @@ var ch, code, cu1, cu2; ch = source[index]; code = 0; // At least, one hex digit is required. if (ch === '}') { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } while (index < length) { ch = source[index++]; @@ -12566,11 +12666,11 @@ code = code * 16 + '0123456789abcdef'.indexOf(ch.toLowerCase()); } if (code > 0x10FFFF || ch !== '}') { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } // UTF-16 Encoding if (code <= 0xFFFF) { return String.fromCharCode(code); @@ -12586,18 +12686,18 @@ ch = source.charCodeAt(index++); id = String.fromCharCode(ch); // '\u' (U+005C, U+0075) denotes an escaped character. if (ch === 0x5C) { if (source.charCodeAt(index) !== 0x75) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } ++index; ch = scanHexEscape('u'); if (!ch || ch === '\\' || !isIdentifierStart(ch.charCodeAt(0))) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } id = ch; } @@ -12613,18 +12713,18 @@ if (ch === 0x5C) { id = id.substr(0, id.length - 1); if (source.charCodeAt(index) !== 0x75) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } ++index; ch = scanHexEscape('u'); if (!ch || ch === '\\' || !isIdentifierPart(ch.charCodeAt(0))) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } id += ch; } } @@ -12816,10 +12916,14 @@ type: TokenPunctuator, value: ch2, start: start, end: index }; + } + + if (ch2 === '//') { + throwError({}, MessageUnexpectedToken, ILLEGAL); } // 1-character punctuators: < > = ! 
+ - * % & | ^ / if ('<>=!+-*%&|^/'.indexOf(ch1) >= 0) { ++index; @@ -12829,11 +12933,11 @@ start: start, end: index }; } - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } // 7.8.3 Numeric Literals function scanHexLiteral(start) { let number = ''; @@ -12845,15 +12949,15 @@ number += source[index++]; } if (number.length === 0) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } if (isIdentifierStart(source.charCodeAt(index))) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } return { type: TokenNumericLiteral, value: parseInt('0x' + number, 16), @@ -12872,11 +12976,11 @@ number += source[index++]; } if (isIdentifierStart(source.charCodeAt(index)) || isDecimalDigit(source.charCodeAt(index))) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } return { type: TokenNumericLiteral, value: parseInt(number, 8), @@ -12908,11 +13012,11 @@ return scanOctalLiteral(start); } // decimal number starts with '0' such as '09' is illegal. if (ch && isDecimalDigit(ch.charCodeAt(0))) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } } while (isDecimalDigit(source.charCodeAt(index))) { number += source[index++]; @@ -12942,16 +13046,16 @@ if (isDecimalDigit(source.charCodeAt(index))) { while (isDecimalDigit(source.charCodeAt(index))) { number += source[index++]; } } else { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } } if (isIdentifierStart(source.charCodeAt(index))) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } return { type: TokenNumericLiteral, value: parseFloat(number), @@ -13055,11 +13159,11 @@ str += ch; } } if (quote !== '') { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } return { type: TokenStringLiteral, value: str, @@ -13168,11 +13272,11 @@ } ++index; if (ch === '\\' && index < length) { - throwError({}, MessageUnexpectedToken, ILLEGAL$1); + throwError({}, MessageUnexpectedToken, ILLEGAL); } else { flags += ch; str += ch; } } @@ -13512,11 +13616,11 @@ } // 11.1.6 The Grouping Operator function parseGroupExpression() { expect('('); - const expr = parseExpression(); + const expr = parseExpression$1(); expect(')'); return expr; } // 11.1 Primary Expressions @@ -13607,11 +13711,11 @@ return parseNonComputedProperty(); } function parseComputedMember() { expect('['); - const expr = parseExpression(); + const expr = parseExpression$1(); expect(']'); return expr; } function parseLeftHandSideExpressionAllowCall() { @@ -13808,11 +13912,11 @@ return expr; } // 11.14 Comma Operator - function parseExpression() { + function parseExpression$1() { const expr = parseConditionalExpression(); if (match(',')) { throw new Error(DISABLED); // no sequence expressions } @@ -13824,11 +13928,11 @@ source = code; index = 0; length = source.length; lookahead = null; peek(); - const expr = parseExpression(); + const expr = parseExpression$1(); if (lookahead.type !== TokenEOF) { throw new Error('Unexpect token after expression.'); } @@ -13907,138 +14011,147 @@ expr: this.expr }; } hash() { - return "Filter ".concat(this.expr); + return `Filter ${this.expr}`; } } function parseUnitSelection(model, selDefs) { const selCmpts = {}; const selectionConfig = 
model.config.selection; + if (!selDefs || !selDefs.length) return selCmpts; - for (const name of keys(selDefs !== null && selDefs !== void 0 ? selDefs : {})) { - const selDef = duplicate(selDefs[name]); - const { - fields, - encodings, - ...cfg - } = selectionConfig[selDef.type]; // Project transform applies its defaults. - // Set default values from config if a property hasn't been specified, + for (const def of selDefs) { + const name = varName(def.name); + const selDef = def.select; + const type = vegaUtil.isString(selDef) ? selDef : selDef.type; + const defaults = vegaUtil.isObject(selDef) ? duplicate(selDef) : { + type + }; // Set default values from config if a property hasn't been specified, // or if it is true. E.g., "translate": true should use the default // event handlers for translate. However, true may be a valid value for // a property (e.g., "nearest": true). + const cfg = selectionConfig[type]; + for (const key in cfg) { - // A selection should contain either `encodings` or `fields`, only use - // default values for these two values if neither of them is specified. - if (key === 'encodings' && selDef.fields || key === 'fields' && selDef.encodings) { + // Project transform applies its defaults. + if (key === 'fields' || key === 'encodings') { continue; } if (key === 'mark') { - selDef[key] = { ...cfg[key], - ...selDef[key] + defaults[key] = { ...cfg[key], + ...defaults[key] }; } - if (selDef[key] === undefined || selDef[key] === true) { - var _cfg$key; - - selDef[key] = (_cfg$key = cfg[key]) !== null && _cfg$key !== void 0 ? _cfg$key : selDef[key]; + if (defaults[key] === undefined || defaults[key] === true) { + defaults[key] = cfg[key] ?? defaults[key]; } } - const safeName = varName(name); - const selCmpt = selCmpts[safeName] = { ...selDef, - name: safeName, - events: isString(selDef.on) ? eventSelector(selDef.on, 'scope') : duplicate(selDef.on) + const selCmpt = selCmpts[name] = { ...defaults, + name, + type, + init: def.value, + bind: def.bind, + events: vegaUtil.isString(defaults.on) ? eventSelector(defaults.on, 'scope') : vegaUtil.array(duplicate(defaults.on)) }; - forEachTransform(selCmpt, txCompiler => { - if (txCompiler.has(selCmpt) && txCompiler.parse) { - txCompiler.parse(model, selCmpt, selDef, selDefs[name]); + + for (const c of selectionCompilers) { + if (c.defined(selCmpt) && c.parse) { + c.parse(model, selCmpt, def); } - }); + } } return selCmpts; } - function parseSelectionPredicate(model, selections, dfnode, datum = 'datum') { - const stores = []; + function parseSelectionPredicate(model, pred, dfnode, datum = 'datum') { + const name = vegaUtil.isString(pred) ? pred : pred.param; + const vname = varName(name); + const store = vegaUtil.stringValue(vname + STORE); + let selCmpt; - function expr(name) { - const vname = varName(name); - const selCmpt = model.getSelectionComponent(vname, name); - const store = $(vname + STORE); + try { + selCmpt = model.getSelectionComponent(vname, name); + } catch (e) { + // If a selection isn't found, treat as a variable parameter and coerce to boolean. + return `!!${vname}`; + } - if (selCmpt.project.timeUnit) { - const child = dfnode !== null && dfnode !== void 0 ? dfnode : model.component.data.raw; - const tunode = selCmpt.project.timeUnit.clone(); + if (selCmpt.project.timeUnit) { + const child = dfnode ?? 
model.component.data.raw; + const tunode = selCmpt.project.timeUnit.clone(); - if (child.parent) { - tunode.insertAsParentOf(child); - } else { - child.parent = tunode; - } + if (child.parent) { + tunode.insertAsParentOf(child); + } else { + child.parent = tunode; } - - if (selCmpt.empty !== 'none') { - stores.push(store); - } - - return "vlSelectionTest(".concat(store, ", ").concat(datum) + (selCmpt.resolve === 'global' ? ')' : ", ".concat($(selCmpt.resolve), ")")); } - const predicateStr = logicalExpr(selections, expr); - return (stores.length ? '!(' + stores.map(s => "length(data(".concat(s, "))")).join(' || ') + ') || ' : '') + "(".concat(predicateStr, ")"); + const test = `vlSelectionTest(${store}, ${datum}${selCmpt.resolve === 'global' ? ')' : `, ${vegaUtil.stringValue(selCmpt.resolve)})`}`; + const length = `length(data(${store}))`; + return pred.empty === false ? `${length} && ${test}` : `!${length} || ${test}`; } - function parseSelectionBinExtent(selCmpt, extent) { + function parseSelectionExtent(model, name, extent) { + const vname = varName(name); const encoding = extent['encoding']; let field = extent['field']; + let selCmpt; + try { + selCmpt = model.getSelectionComponent(vname, name); + } catch (e) { + // If a selection isn't found, treat it as a variable parameter. + return vname; + } + if (!encoding && !field) { field = selCmpt.project.items[0].field; if (selCmpt.project.items.length > 1) { - warn('A "field" or "encoding" must be specified when using a selection as a scale domain. ' + "Using \"field\": ".concat($(field), ".")); + warn('A "field" or "encoding" must be specified when using a selection as a scale domain. ' + `Using "field": ${vegaUtil.stringValue(field)}.`); } } else if (encoding && !field) { const encodings = selCmpt.project.items.filter(p => p.channel === encoding); if (!encodings.length || encodings.length > 1) { field = selCmpt.project.items[0].field; - warn((!encodings.length ? 'No ' : 'Multiple ') + "matching ".concat($(encoding), " encoding found for selection ").concat($(extent.selection), ". ") + "Using \"field\": ".concat($(field), ".")); + warn((!encodings.length ? 'No ' : 'Multiple ') + `matching ${vegaUtil.stringValue(encoding)} encoding found for selection ${vegaUtil.stringValue(extent.param)}. ` + `Using "field": ${vegaUtil.stringValue(field)}.`); } else { field = encodings[0].field; } } - return "".concat(selCmpt.name, "[").concat($(field), "]"); + return `${selCmpt.name}[${vegaUtil.stringValue(replacePathInField(field))}]`; } function materializeSelections(model, main) { - forEachSelection(model, selCmpt => { - const selection = selCmpt.name; - const lookupName = model.getName("lookup_".concat(selection)); + for (const [selection, selCmpt] of entries$1(model.component.selection ?? {})) { + const lookupName = model.getName(`lookup_${selection}`); model.component.data.outputNodes[lookupName] = selCmpt.materialized = new OutputNode(new FilterNode(main, model, { - selection + param: selection }), lookupName, DataSourceType.Lookup, model.component.data.outputNodeRefCounts); - }); + } } /** * Converts a predicate into an expression. */ // model is only used for selection filters. 
function expression(model, filterOp, node) { return logicalExpr(filterOp, predicate => { - if (isString(predicate)) { + if (vegaUtil.isString(predicate)) { return predicate; } else if (isSelectionPredicate(predicate)) { - return parseSelectionPredicate(model, predicate.selection, node); + return parseSelectionPredicate(model, predicate, node); } else { // Filter Object return fieldFilterExpression(predicate); } }); @@ -14047,23 +14160,23 @@ function assembleTitle(title, config) { if (!title) { return undefined; } - if (isArray(title) && !isText(title)) { + if (vegaUtil.isArray(title) && !isText(title)) { return title.map(fieldDef => defaultTitle(fieldDef, config)).join(', '); } return title; } function setAxisEncode(axis, part, vgProp, vgRef) { - var _axis$encode, _axis$encode$part, _axis$encode$part$upd; + var _axis$encode, _axis$encode$part; - axis.encode = (_axis$encode = axis.encode) !== null && _axis$encode !== void 0 ? _axis$encode : {}; - axis.encode[part] = (_axis$encode$part = axis.encode[part]) !== null && _axis$encode$part !== void 0 ? _axis$encode$part : {}; - axis.encode[part].update = (_axis$encode$part$upd = axis.encode[part].update) !== null && _axis$encode$part$upd !== void 0 ? _axis$encode$part$upd : {}; // TODO: remove as any after https://github.com/prisma/nexus-prisma/issues/291 + axis.encode ?? (axis.encode = {}); + (_axis$encode = axis.encode)[part] ?? (_axis$encode[part] = {}); + (_axis$encode$part = axis.encode[part]).update ?? (_axis$encode$part.update = {}); // TODO: remove as any after https://github.com/prisma/nexus-prisma/issues/291 axis.encode[part].update[vgProp] = vgRef; } function assembleAxis(axisCmpt, kind, config, opt = { @@ -14094,11 +14207,11 @@ // deal with conditional axis value const { condition, ...valueOrSignalRef } = propValue; - const conditions = array(condition); + const conditions = vegaUtil.array(condition); const propIndex = CONDITIONAL_AXIS_PROP_INDEX[prop]; if (propIndex) { const { vgProp, @@ -14124,11 +14237,11 @@ signal: conditions.map(c => { const { test, ...valueOrSignalCRef } = c; - return "".concat(expression(null, test), " ? ").concat(exprFromValueOrSignalRef(valueOrSignalCRef), " : "); + return `${expression(null, test)} ? ${exprFromValueOrSignalRef(valueOrSignalCRef)} : `; }).join('') + exprFromValueOrSignalRef(valueOrSignalRef) }; axis[prop] = signalRef; } } else if (isSignalRef(propValue)) { @@ -14141,10 +14254,16 @@ } = propIndex; setAxisEncode(axis, part, vgProp, propValue); delete axis[prop]; } // else do nothing since the property already supports signal + } // Do not pass labelAlign/Baseline = null to Vega since it won't pass the schema + // Note that we need to use null so the default labelAlign is preserved. + + + if (contains(['labelAlign', 'labelBaseline'], prop) && axis[prop] === null) { + delete axis[prop]; } } if (kind === 'grid') { if (!axis.grid) { @@ -14193,11 +14312,11 @@ if (labelExpr !== undefined) { var _axis$encode2, _axis$encode2$labels; let expr = labelExpr; - if (((_axis$encode2 = axis.encode) === null || _axis$encode2 === void 0 ? void 0 : (_axis$encode2$labels = _axis$encode2.labels) === null || _axis$encode2$labels === void 0 ? 
void 0 : _axis$encode2$labels.update) && isSignalRef(axis.encode.labels.update.text)) { + if ((_axis$encode2 = axis.encode) !== null && _axis$encode2 !== void 0 && (_axis$encode2$labels = _axis$encode2.labels) !== null && _axis$encode2$labels !== void 0 && _axis$encode2$labels.update && isSignalRef(axis.encode.labels.update.text)) { expr = replaceAll(labelExpr, 'datum.label', axis.encode.labels.update.text.signal); } setAxisEncode(axis, 'labels', 'text', { signal: expr @@ -14289,11 +14408,11 @@ const conditionalOrientAxisConfig = {}; for (const prop of props.values()) { conditionalOrientAxisConfig[prop] = { // orient is surely signal in this case - signal: "".concat(orient['signal'], " === \"").concat(orient1, "\" ? ").concat(signalOrStringValue(orientConfig1[prop]), " : ").concat(signalOrStringValue(orientConfig2[prop])) + signal: `${orient['signal']} === "${orient1}" ? ${signalOrStringValue(orientConfig1[prop])} : ${signalOrStringValue(orientConfig2[prop])}` }; } return conditionalOrientAxisConfig; } @@ -14303,11 +14422,11 @@ } function getAxisConfigs(channel, scaleType, orient, config) { const typeBasedConfigTypes = scaleType === 'band' ? ['axisDiscrete', 'axisBand'] : scaleType === 'point' ? ['axisDiscrete', 'axisPoint'] : isQuantitative(scaleType) ? ['axisQuantitative'] : scaleType === 'time' || scaleType === 'utc' ? ['axisTemporal'] : []; const axisChannel = channel === 'x' ? 'axisX' : 'axisY'; - const axisOrient = isSignalRef(orient) ? 'axisOrient' : 'axis' + titleCase(orient); // axisTop, axisBottom, ... + const axisOrient = isSignalRef(orient) ? 'axisOrient' : `axis${titleCase(orient)}`; // axisTop, axisBottom, ... const vlOnlyConfigTypes = [// technically Vega does have axisBand, but if we make another separation here, // it will further introduce complexity in the code ...typeBasedConfigTypes, ...typeBasedConfigTypes.map(c => axisChannel + c.substr(4))]; const vgConfigTypes = ['axis', axisOrient, axisChannel]; @@ -14325,11 +14444,11 @@ // TODO: add special casing to add conditional value based on orient signal let style = (_config$configType = config[configType]) === null || _config$configType === void 0 ? void 0 : _config$configType.style; if (style) { - style = array(style); + style = vegaUtil.array(style); for (const s of style) { toMerge.push(config.style[s]); } } @@ -14389,19 +14508,11 @@ }, grid: ({ fieldOrDatumDef, axis, scaleType - }) => { - if (isFieldDef(fieldOrDatumDef) && isBinned(fieldOrDatumDef.bin)) { - return false; - } else { - var _axis$grid; - - return (_axis$grid = axis.grid) !== null && _axis$grid !== void 0 ? _axis$grid : defaultGrid(scaleType, fieldOrDatumDef); - } - }, + }) => axis.grid ?? defaultGrid(scaleType, fieldOrDatumDef), gridScale: ({ model, channel }) => gridScale(model, channel), labelAlign: ({ @@ -14422,24 +14533,16 @@ }) => axis.labelBaseline || defaultLabelBaseline(labelAngle, orient, channel), labelFlush: ({ axis, fieldOrDatumDef, channel - }) => { - var _axis$labelFlush; - - return (_axis$labelFlush = axis.labelFlush) !== null && _axis$labelFlush !== void 0 ? _axis$labelFlush : defaultLabelFlush(fieldOrDatumDef.type, channel); - }, + }) => axis.labelFlush ?? defaultLabelFlush(fieldOrDatumDef.type, channel), labelOverlap: ({ axis, fieldOrDatumDef, scaleType - }) => { - var _axis$labelOverlap; - - return (_axis$labelOverlap = axis.labelOverlap) !== null && _axis$labelOverlap !== void 0 ? 
_axis$labelOverlap : defaultLabelOverlap(fieldOrDatumDef.type, scaleType, isFieldDef(fieldOrDatumDef) && !!fieldOrDatumDef.timeUnit, isFieldDef(fieldOrDatumDef) ? fieldOrDatumDef.sort : undefined); - }, + }) => axis.labelOverlap ?? defaultLabelOverlap$1(fieldOrDatumDef.type, scaleType, isFieldDef(fieldOrDatumDef) && !!fieldOrDatumDef.timeUnit, isFieldDef(fieldOrDatumDef) ? fieldOrDatumDef.sort : undefined), // we already calculate orient in parse orient: ({ orient }) => orient, // Need to cast until Vega supports signal @@ -14448,15 +14551,13 @@ model, axis, fieldOrDatumDef, scaleType }) => { - var _axis$tickCount; - const sizeType = channel === 'x' ? 'width' : channel === 'y' ? 'height' : undefined; const size = sizeType ? model.getSizeSignalRef(sizeType) : undefined; - return (_axis$tickCount = axis.tickCount) !== null && _axis$tickCount !== void 0 ? _axis$tickCount : defaultTickCount({ + return axis.tickCount ?? defaultTickCount({ fieldOrDatumDef, scaleType, size, values: axis.values }); @@ -14483,29 +14584,25 @@ return mergeTitleFieldDefs(fieldDef ? [toFieldDefBase(fieldDef)] : [], isFieldDef(fieldDef2) ? [toFieldDefBase(fieldDef2)] : []); }, values: ({ axis, fieldOrDatumDef - }) => values(axis, fieldOrDatumDef), + }) => values$1(axis, fieldOrDatumDef), zindex: ({ axis, fieldOrDatumDef, mark - }) => { - var _axis$zindex; - - return (_axis$zindex = axis.zindex) !== null && _axis$zindex !== void 0 ? _axis$zindex : defaultZindex(mark, fieldOrDatumDef); - } + }) => axis.zindex ?? defaultZindex(mark, fieldOrDatumDef) }; // TODO: we need to refactor this method after we take care of config refactoring /** * Default rules for whether to show a grid should be shown for a channel. * If `grid` is unspecified, the default value is `true` for ordinal scales that are not binned */ function defaultGrid(scaleType, fieldDef) { - return !hasDiscreteDomain(scaleType) && isFieldDef(fieldDef) && !isBinning(fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.bin); + return !hasDiscreteDomain(scaleType) && isFieldDef(fieldDef) && !isBinning(fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.bin) && !isBinned(fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.bin); } function gridScale(model, channel) { const gridChannel = channel === 'x' ? 'y' : 'x'; if (model.getScaleComponent(gridChannel)) { @@ -14537,53 +14634,53 @@ return undefined; } } } function normalizeAngleExpr(angle) { - return "(((".concat(angle.signal, " % 360) + 360) % 360)"); + return `(((${angle.signal} % 360) + 360) % 360)`; } function defaultLabelBaseline(angle, orient, channel, alwaysIncludeMiddle) { if (angle !== undefined) { if (channel === 'x') { if (isSignalRef(angle)) { const a = normalizeAngleExpr(angle); - const orientIsTop = isSignalRef(orient) ? "(".concat(orient.signal, " === \"top\")") : orient === 'top'; + const orientIsTop = isSignalRef(orient) ? `(${orient.signal} === "top")` : orient === 'top'; return { - signal: "(45 < ".concat(a, " && ").concat(a, " < 135) || (225 < ").concat(a, " && ").concat(a, " < 315) ? \"middle\" :") + "(".concat(a, " <= 45 || 315 <= ").concat(a, ") === ").concat(orientIsTop, " ? \"bottom\" : \"top\"") + signal: `(45 < ${a} && ${a} < 135) || (225 < ${a} && ${a} < 315) ? "middle" :` + `(${a} <= 45 || 315 <= ${a}) === ${orientIsTop} ? "bottom" : "top"` }; } if (45 < angle && angle < 135 || 225 < angle && angle < 315) { return 'middle'; } if (isSignalRef(orient)) { const op = angle <= 45 || 315 <= angle ? 
'===' : '!=='; return { - signal: "".concat(orient.signal, " ").concat(op, " \"top\" ? \"bottom\" : \"top\"") + signal: `${orient.signal} ${op} "top" ? "bottom" : "top"` }; } return (angle <= 45 || 315 <= angle) === (orient === 'top') ? 'bottom' : 'top'; } else { if (isSignalRef(angle)) { const a = normalizeAngleExpr(angle); - const orientIsLeft = isSignalRef(orient) ? "(".concat(orient.signal, " === \"left\")") : orient === 'left'; + const orientIsLeft = isSignalRef(orient) ? `(${orient.signal} === "left")` : orient === 'left'; const middle = alwaysIncludeMiddle ? '"middle"' : 'null'; return { - signal: "".concat(a, " <= 45 || 315 <= ").concat(a, " || (135 <= ").concat(a, " && ").concat(a, " <= 225) ? ").concat(middle, " : (45 <= ").concat(a, " && ").concat(a, " <= 135) === ").concat(orientIsLeft, " ? \"top\" : \"bottom\"") + signal: `${a} <= 45 || 315 <= ${a} || (135 <= ${a} && ${a} <= 225) ? ${middle} : (45 <= ${a} && ${a} <= 135) === ${orientIsLeft} ? "top" : "bottom"` }; } if (angle <= 45 || 315 <= angle || 135 <= angle && angle <= 225) { return alwaysIncludeMiddle ? 'middle' : null; } if (isSignalRef(orient)) { const op = 45 <= angle && angle <= 135 ? '===' : '!=='; return { - signal: "".concat(orient.signal, " ").concat(op, " \"left\" ? \"top\" : \"bottom\"") + signal: `${orient.signal} ${op} "left" ? "top" : "bottom"` }; } return (45 <= angle && angle <= 135) === (orient === 'left') ? 'top' : 'bottom'; } @@ -14600,26 +14697,26 @@ const startAngle = isX ? 0 : 90; const mainOrient = isX ? 'bottom' : 'left'; if (isSignalRef(angle)) { const a = normalizeAngleExpr(angle); - const orientIsMain = isSignalRef(orient) ? "(".concat(orient.signal, " === \"").concat(mainOrient, "\")") : orient === mainOrient; + const orientIsMain = isSignalRef(orient) ? `(${orient.signal} === "${mainOrient}")` : orient === mainOrient; return { - signal: "(".concat(startAngle ? '(' + a + ' + 90)' : a, " % 180 === 0) ? ").concat(isX ? null : '"center"', " :") + "(".concat(startAngle, " < ").concat(a, " && ").concat(a, " < ").concat(180 + startAngle, ") === ").concat(orientIsMain, " ? \"left\" : \"right\"") + signal: `(${startAngle ? `(${a} + 90)` : a} % 180 === 0) ? ${isX ? null : '"center"'} :` + `(${startAngle} < ${a} && ${a} < ${180 + startAngle}) === ${orientIsMain} ? "left" : "right"` }; } if ((angle + startAngle) % 180 === 0) { // For bottom, use default label align so label flush still works return isX ? null : 'center'; } if (isSignalRef(orient)) { const op = startAngle < angle && angle < 180 + startAngle ? '===' : '!=='; - const orientIsMain = "".concat(orient.signal, " ").concat(op, " \"").concat(mainOrient, "\""); + const orientIsMain = `${orient.signal} ${op} "${mainOrient}"`; return { - signal: "".concat(orientIsMain, " ? \"left\" : \"right\"") + signal: `${orientIsMain} ? 
"left" : "right"` }; } if ((startAngle < angle && angle < 180 + startAngle) === (orient === mainOrient)) { return 'left'; @@ -14632,13 +14729,13 @@ return true; } return undefined; } - function defaultLabelOverlap(type, scaleType, hasTimeUnit, sort) { + function defaultLabelOverlap$1(type, scaleType, hasTimeUnit, sort) { // do not prevent overlap for nominal data because there is no way to infer what the missing labels are - if (hasTimeUnit && !isObject(sort) || type !== 'nominal' && type !== 'ordinal') { + if (hasTimeUnit && !vegaUtil.isObject(sort) || type !== 'nominal' && type !== 'ordinal') { if (scaleType === 'log' || scaleType === 'symlog') { return 'greedy'; } return true; @@ -14660,21 +14757,21 @@ var _normalizeTimeUnit; if (isBinning(fieldOrDatumDef.bin)) { // for binned data, we don't want more ticks than maxbins return { - signal: "ceil(".concat(size.signal, "/10)") + signal: `ceil(${size.signal}/10)` }; } if (fieldOrDatumDef.timeUnit && contains(['month', 'hours', 'day', 'quarter'], (_normalizeTimeUnit = normalizeTimeUnit(fieldOrDatumDef.timeUnit)) === null || _normalizeTimeUnit === void 0 ? void 0 : _normalizeTimeUnit.unit)) { return undefined; } } return { - signal: "ceil(".concat(size.signal, "/40)") + signal: `ceil(${size.signal}/40)` }; } return undefined; } @@ -14699,14 +14796,14 @@ return title2; } return undefined; } - function values(axis, fieldOrDatumDef) { + function values$1(axis, fieldOrDatumDef) { const vals = axis.values; - if (isArray(vals)) { + if (vegaUtil.isArray(vals)) { return valueArray(fieldOrDatumDef, vals); } else if (isSignalRef(vals)) { return vals; } @@ -14747,15 +14844,15 @@ timeUnit } = fieldDef; const sort = fieldDef.sort; // generate `datum["a"] === val0 ? 0 : datum["a"] === val1 ? 1 : ... : n` via FieldEqualPredicate const calculate = sort.map((sortValue, i) => { - return "".concat(fieldFilterExpression({ - field, - timeUnit, - equal: sortValue - }), " ? ").concat(i, " : "); + return `${fieldFilterExpression({ + field, + timeUnit, + equal: sortValue + })} ? ${i} : `; }).join('') + sort.length; parent = new CalculateNode(parent, { calculate, as: sortArrayIndexField(fieldDef, channel, { forAs: true @@ -14781,19 +14878,19 @@ as: this.transform.as }; } hash() { - return "Calculate ".concat(hash(this.transform)); + return `Calculate ${hash(this.transform)}`; } } function sortArrayIndexField(fieldDef, channel, opt) { return vgField(fieldDef, { prefix: channel, suffix: 'sort_index', - ...(opt !== null && opt !== void 0 ? opt : {}) + ...(opt ?? {}) }); } /** * Get header channel, which can be different from facet channel when orient is specified or when the facet channel is facet. @@ -14845,13 +14942,13 @@ titleOrient } = getHeaderProperties(['titleAnchor', 'titleAngle', 'titleOrient'], facetFieldDef.header, config, channel); const headerChannel = getHeaderChannel(channel, titleOrient); const titleAngle = normalizeAngle(ta); return { - name: "".concat(channel, "-title"), + name: `${channel}-title`, type: 'group', - role: "".concat(headerChannel, "-title"), + role: `${headerChannel}-title`, title: { text: title, ...(channel === 'row' ? { orient: 'left' } : {}), @@ -14903,25 +15000,23 @@ } return groups; } - function getSort(facetFieldDef, channel) { + function getSort$1(facetFieldDef, channel) { const { sort } = facetFieldDef; if (isSortField(sort)) { - var _sort$order; - return { field: vgField(sort, { expr: 'datum' }), - order: (_sort$order = sort.order) !== null && _sort$order !== void 0 ? _sort$order : 'ascending' + order: sort.order ?? 
'ascending' }; - } else if (isArray(sort)) { + } else if (vegaUtil.isArray(sort)) { return { field: sortArrayIndexField(facetFieldDef, channel, { expr: 'datum' }), order: 'ascending' @@ -14929,11 +15024,11 @@ } else { return { field: vgField(facetFieldDef, { expr: 'datum' }), - order: sort !== null && sort !== void 0 ? sort : 'ascending' + order: sort ?? 'ascending' }; } } function assembleLabelTitle(facetFieldDef, channel, config) { @@ -14992,22 +15087,22 @@ const hasAxes = (axes === null || axes === void 0 ? void 0 : axes.length) > 0; if (title || hasAxes) { const sizeChannel = channel === 'row' ? 'height' : 'width'; return { - name: model.getName("".concat(channel, "_").concat(headerType)), + name: model.getName(`${channel}_${headerType}`), type: 'group', - role: "".concat(channel, "-").concat(headerType), + role: `${channel}-${headerType}`, ...(layoutHeader.facetFieldDef ? { from: { - data: model.getName(channel + '_domain') + data: model.getName(`${channel}_domain`) }, - sort: getSort(facetFieldDef, channel) + sort: getSort$1(facetFieldDef, channel) } : {}), ...(hasAxes && isFacetWithoutRowCol ? { from: { - data: model.getName("facet_domain_".concat(channel)) + data: model.getName(`facet_domain_${channel}`) } } : {}), ...(title ? { title } : {}), @@ -15044,11 +15139,11 @@ const titleBand = {}; for (const channel of FACET_CHANNELS) { const headerComponent = headerComponentIndex[channel]; - if (headerComponent === null || headerComponent === void 0 ? void 0 : headerComponent.facetFieldDef) { + if (headerComponent !== null && headerComponent !== void 0 && headerComponent.facetFieldDef) { const { titleAnchor, titleOrient } = getHeaderProperties(['titleAnchor', 'titleOrient'], headerComponent.facetFieldDef.header, config, channel); const headerChannel = getHeaderChannel(channel, titleOrient); @@ -15115,11 +15210,11 @@ } } return [stepSignal(scaleName, range), { name, - update: sizeExpr(scaleName, scaleComponent, "domain('".concat(scaleName, "').length")) + update: sizeExpr(scaleName, scaleComponent, `domain('${scaleName}').length`) }]; } } /* istanbul ignore next: Condition should not happen -- only for warning in development. */ @@ -15127,11 +15222,11 @@ throw new Error('layout size is step although width/height is not step.'); } else if (size == 'container') { const isWidth = name.endsWith('width'); const expr = isWidth ? 'containerSize()[0]' : 'containerSize()[1]'; const defaultValue = getViewConfigContinuousSize(model.config.view, isWidth ? 'width' : 'height'); - const safeExpr = "isFinite(".concat(expr, ") ? ").concat(expr, " : ").concat(defaultValue); + const safeExpr = `isFinite(${expr}) ? ${expr} : ${defaultValue}`; return [{ name, init: safeExpr, on: [{ update: safeExpr, @@ -15146,11 +15241,11 @@ } } function stepSignal(scaleName, range) { return { - name: scaleName + '_step', + name: `${scaleName}_step`, value: range.step }; } function sizeExpr(scaleName, scaleComponent, cardinality) { @@ -15160,11 +15255,11 @@ let paddingInner = scaleComponent.get('paddingInner'); paddingInner = type === 'band' ? // only band has real paddingInner paddingInner !== undefined ? paddingInner : padding : // For point, as calculated in https://github.com/vega/vega-scale/blob/master/src/band.js#L128, // it's equivalent to have paddingInner = 1 since there is only n-1 steps between n points. 
1; - return "bandspace(".concat(cardinality, ", ").concat(signalOrStringValue(paddingInner), ", ").concat(signalOrStringValue(paddingOuter), ") * ").concat(scaleName, "_step"); + return `bandspace(${cardinality}, ${signalOrStringValue(paddingInner)}, ${signalOrStringValue(paddingOuter)}) * ${scaleName}_step`; } function getSizeTypeFromLayoutSizeType(layoutSizeType) { return layoutSizeType === 'childWidth' ? 'width' : layoutSizeType === 'childHeight' ? 'height' : layoutSizeType; } @@ -15177,14 +15272,16 @@ }; }, {}); } function defaultScaleResolve(channel, model) { - if (isLayerModel(model) || isFacetModel(model)) { + if (isFacetModel(model)) { + return channel === 'theta' ? 'independent' : 'shared'; + } else if (isLayerModel(model)) { return 'shared'; } else if (isConcatModel(model)) { - return isXorY(channel) ? 'independent' : 'shared'; + return isXorY(channel) || channel === 'theta' || channel === 'radius' ? 'independent' : 'shared'; } /* istanbul ignore next: should never reach here. */ throw new Error('invalid model type for resolve'); @@ -15223,22 +15320,20 @@ class LegendComponent extends Split {} const legendEncodeRules = { symbols, gradient, - labels, - entries: entries$1 + labels: labels$1, + entries }; function symbols(symbolsSpec, { fieldOrDatumDef, model, channel, legendCmpt, legendType }) { - var _legendCmpt$get, _legendCmpt$get2, _legendCmpt$get3, _getMaxValue; - if (legendType !== 'symbol') { return undefined; } const { @@ -15252,14 +15347,14 @@ ...color(model, { filled }) }; // FIXME: remove this when VgEncodeEntry is compatible with SymbolEncodeEntry - const symbolOpacity = (_legendCmpt$get = legendCmpt.get('symbolOpacity')) !== null && _legendCmpt$get !== void 0 ? _legendCmpt$get : config.legend.symbolOpacity; - const symbolFillColor = (_legendCmpt$get2 = legendCmpt.get('symbolFillColor')) !== null && _legendCmpt$get2 !== void 0 ? _legendCmpt$get2 : config.legend.symbolFillColor; - const symbolStrokeColor = (_legendCmpt$get3 = legendCmpt.get('symbolStrokeColor')) !== null && _legendCmpt$get3 !== void 0 ? _legendCmpt$get3 : config.legend.symbolStrokeColor; - const opacity = symbolOpacity === undefined ? (_getMaxValue = getMaxValue(encoding.opacity)) !== null && _getMaxValue !== void 0 ? _getMaxValue : markDef.opacity : undefined; + const symbolOpacity = legendCmpt.get('symbolOpacity') ?? config.legend.symbolOpacity; + const symbolFillColor = legendCmpt.get('symbolFillColor') ?? config.legend.symbolFillColor; + const symbolStrokeColor = legendCmpt.get('symbolStrokeColor') ?? config.legend.symbolStrokeColor; + const opacity = symbolOpacity === undefined ? getMaxValue(encoding.opacity) ?? markDef.opacity : undefined; if (out.fill) { // for fill legend, we don't want any fill in symbol if (channel === 'fill' || filled && channel === COLOR) { delete out.fill; @@ -15267,20 +15362,16 @@ if (out.fill['field']) { // For others, set fill to some opaque value (or nothing if a color is already set) if (symbolFillColor) { delete out.fill; } else { - var _config$legend$symbol; - - out.fill = signalOrValueRef((_config$legend$symbol = config.legend.symbolBaseFillColor) !== null && _config$legend$symbol !== void 0 ? _config$legend$symbol : 'black'); - out.fillOpacity = signalOrValueRef(opacity !== null && opacity !== void 0 ? opacity : 1); + out.fill = signalOrValueRef(config.legend.symbolBaseFillColor ?? 'black'); + out.fillOpacity = signalOrValueRef(opacity ?? 
1); } - } else if (isArray(out.fill)) { - var _ref, _getFirstConditionVal, _encoding$fill; + } else if (vegaUtil.isArray(out.fill)) { + const fill = getFirstConditionValue(encoding.fill ?? encoding.color) ?? markDef.fill ?? (filled && markDef.color); - const fill = (_ref = (_getFirstConditionVal = getFirstConditionValue((_encoding$fill = encoding.fill) !== null && _encoding$fill !== void 0 ? _encoding$fill : encoding.color)) !== null && _getFirstConditionVal !== void 0 ? _getFirstConditionVal : markDef.fill) !== null && _ref !== void 0 ? _ref : filled && markDef.color; - if (fill) { out.fill = signalOrValueRef(fill); } } } @@ -15291,11 +15382,11 @@ delete out.stroke; } else { if (out.stroke['field'] || symbolStrokeColor) { // For others, remove stroke field delete out.stroke; - } else if (isArray(out.stroke)) { + } else if (vegaUtil.isArray(out.stroke)) { const stroke = getFirstDefined(getFirstConditionValue(encoding.stroke || encoding.color), markDef.stroke, filled ? markDef.color : undefined); if (stroke) { out.stroke = { value: stroke @@ -15309,11 +15400,11 @@ const condition = isFieldDef(fieldOrDatumDef) && selectedCondition(model, legendCmpt, fieldOrDatumDef); if (condition) { out.opacity = [{ test: condition, - ...signalOrValueRef(opacity !== null && opacity !== void 0 ? opacity : 1) + ...signalOrValueRef(opacity ?? 1) }, signalOrValueRef(config.legend.unselectedOpacity)]; } else if (opacity) { out.opacity = signalOrValueRef(opacity); } } @@ -15326,23 +15417,21 @@ function gradient(gradientSpec, { model, legendType, legendCmpt }) { - var _legendCmpt$get4; - if (legendType !== 'gradient') { return undefined; } const { config, markDef, encoding } = model; let out = {}; - const gradientOpacity = (_legendCmpt$get4 = legendCmpt.get('gradientOpacity')) !== null && _legendCmpt$get4 !== void 0 ? _legendCmpt$get4 : config.legend.gradientOpacity; + const gradientOpacity = legendCmpt.get('gradientOpacity') ?? config.legend.gradientOpacity; const opacity = gradientOpacity === undefined ? getMaxValue(encoding.opacity) || markDef.opacity : undefined; if (opacity) { // only apply opacity if it is neither zero or undefined out.opacity = signalOrValueRef(opacity); @@ -15351,11 +15440,11 @@ out = { ...out, ...gradientSpec }; return isEmpty(out) ? undefined : out; } - function labels(specifiedlabelsSpec, { + function labels$1(specifiedlabelsSpec, { fieldOrDatumDef, model, channel, legendCmpt }) { @@ -15387,15 +15476,15 @@ } : {}), ...specifiedlabelsSpec }; return isEmpty(labelsSpec) ? undefined : labelsSpec; } - function entries$1(entriesSpec, { + function entries(entriesSpec, { legendCmpt }) { const selections = legendCmpt.get('selections'); - return (selections === null || selections === void 0 ? void 0 : selections.length) ? { ...entriesSpec, + return selections !== null && selections !== void 0 && selections.length ? { ...entriesSpec, fill: { value: 'transparent' } } : entriesSpec; } @@ -15410,25 +15499,25 @@ }); } function getConditionValue(channelDef, reducer) { if (hasConditionalValueDef(channelDef)) { - return array(channelDef.condition).reduce(reducer, channelDef.value); + return vegaUtil.array(channelDef.condition).reduce(reducer, channelDef.value); } else if (isValueDef(channelDef)) { return channelDef.value; } return undefined; } function selectedCondition(model, legendCmpt, fieldDef) { const selections = legendCmpt.get('selections'); - if (!(selections === null || selections === void 0 ? 
void 0 : selections.length)) return undefined; - const field = $(fieldDef.field); + if (!(selections !== null && selections !== void 0 && selections.length)) return undefined; + const field = vegaUtil.stringValue(fieldDef.field); return selections.map(name => { - const store = $(varName(name) + STORE); - return "(!length(data(".concat(store, ")) || (").concat(name, "[").concat(field, "] && indexof(").concat(name, "[").concat(field, "], datum.value) >= 0))"); + const store = vegaUtil.stringValue(varName(name) + STORE); + return `(!length(data(${store})) || (${name}[${field}] && indexof(${name}[${field}], datum.value) >= 0))`; }).join(' || '); } const legendRules = { direction: ({ @@ -15454,37 +15543,27 @@ formatType } = legend; return guideFormatType(formatType, fieldOrDatumDef, scaleType); }, gradientLength: params => { - var _ref, _legend$gradientLengt; - const { legend, legendConfig } = params; - return (_ref = (_legend$gradientLengt = legend.gradientLength) !== null && _legend$gradientLengt !== void 0 ? _legend$gradientLengt : legendConfig.gradientLength) !== null && _ref !== void 0 ? _ref : defaultGradientLength(params); + return legend.gradientLength ?? legendConfig.gradientLength ?? defaultGradientLength(params); }, labelOverlap: ({ legend, legendConfig, scaleType - }) => { - var _ref2, _legend$labelOverlap; - - return (_ref2 = (_legend$labelOverlap = legend.labelOverlap) !== null && _legend$labelOverlap !== void 0 ? _legend$labelOverlap : legendConfig.labelOverlap) !== null && _ref2 !== void 0 ? _ref2 : defaultLabelOverlap$1(scaleType); - }, + }) => legend.labelOverlap ?? legendConfig.labelOverlap ?? defaultLabelOverlap(scaleType), symbolType: ({ legend, markDef, channel, encoding - }) => { - var _legend$symbolType; - - return (_legend$symbolType = legend.symbolType) !== null && _legend$symbolType !== void 0 ? _legend$symbolType : defaultSymbolType(markDef.type, channel, encoding.shape, markDef.shape); - }, + }) => legend.symbolType ?? defaultSymbolType(markDef.type, channel, encoding.shape, markDef.shape), title: ({ fieldOrDatumDef, config }) => title(fieldOrDatumDef, config, { allowDisabling: true @@ -15506,29 +15585,27 @@ }, // depended by other property, let's define upfront values: ({ fieldOrDatumDef, legend - }) => values$1(legend, fieldOrDatumDef) + }) => values(legend, fieldOrDatumDef) }; - function values$1(legend, fieldOrDatumDef) { + function values(legend, fieldOrDatumDef) { const vals = legend.values; - if (isArray(vals)) { + if (vegaUtil.isArray(vals)) { return valueArray(fieldOrDatumDef, vals); } else if (isSignalRef(vals)) { return vals; } return undefined; } function defaultSymbolType(mark, channel, shapeChannelDef, markShape) { if (channel !== 'shape') { - var _getFirstConditionVal; - // use the value from the shape encoding or the mark config if they exist - const shape = (_getFirstConditionVal = getFirstConditionValue(shapeChannelDef)) !== null && _getFirstConditionVal !== void 0 ? _getFirstConditionVal : markShape; + const shape = getFirstConditionValue(shapeChannelDef) ?? markShape; if (shape) { return shape; } } @@ -15583,13 +15660,11 @@ legendConfig, legendType, orient, legend }) { - var _ref3, _legend$direction; - - return (_ref3 = (_legend$direction = legend.direction) !== null && _legend$direction !== void 0 ? _legend$direction : legendConfig[legendType ? 'gradientDirection' : 'symbolDirection']) !== null && _ref3 !== void 0 ? _ref3 : defaultDirection(orient, legendType); + return legend.direction ?? legendConfig[legendType ? 
'gradientDirection' : 'symbolDirection'] ?? defaultDirection(orient, legendType); } function defaultDirection(orient, legendType) { switch (orient) { case 'top': case 'bottom': @@ -15640,15 +15715,15 @@ } function gradientLengthSignal(model, sizeType, min, max) { const sizeSignal = model.getSizeSignalRef(sizeType).signal; return { - signal: "clamp(".concat(sizeSignal, ", ").concat(min, ", ").concat(max, ")") + signal: `clamp(${sizeSignal}, ${min}, ${max})` }; } - function defaultLabelOverlap$1(scaleType) { + function defaultLabelOverlap(scaleType) { if (contains(['quantile', 'threshold', 'log', 'symlog'], scaleType)) { return 'greedy'; } return undefined; @@ -15711,19 +15786,19 @@ [channel]: scale }; } // eslint-disable-next-line @typescript-eslint/ban-types - function isExplicit(value, property, legend, fieldDef) { + function isExplicit$1(value, property, legend, fieldDef) { switch (property) { case 'disable': return legend !== undefined; - // if axis is specified or null/false, then it's enable/disable state is explicit + // if axis is specified or null/false, then its enable/disable state is explicit case 'values': // specified legend.values is already respected, but may get transformed. - return !!(legend === null || legend === void 0 ? void 0 : legend.values); + return !!(legend !== null && legend !== void 0 && legend.values); case 'title': // title can be explicit if fieldDef.title is set if (property === 'title' && value === (fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.title)) { return true; @@ -15734,11 +15809,11 @@ return value === (legend || {})[property]; } function parseLegendForChannel(model, channel) { - var _normalizeTimeUnit, _legend$encoding, _legend; + var _normalizeTimeUnit, _legend; let legend = model.legend(channel); const { markDef, encoding, @@ -15792,19 +15867,19 @@ } const value = property in legendRules ? legendRules[property](ruleParams) : legend[property]; if (value !== undefined) { - const explicit = isExplicit(value, property, legend, model.fieldDef(channel)); + const explicit = isExplicit$1(value, property, legend, model.fieldDef(channel)); if (explicit || config.legend[property] === undefined) { legendCmpt.set(property, value, explicit); } } } - const legendEncoding = (_legend$encoding = (_legend = legend) === null || _legend === void 0 ? void 0 : _legend.encoding) !== null && _legend$encoding !== void 0 ? _legend$encoding : {}; + const legendEncoding = ((_legend = legend) === null || _legend === void 0 ? void 0 : _legend.encoding) ?? {}; const selections = legendCmpt.get('selections'); const legendEncode = {}; const legendEncodeParams = { fieldOrDatumDef, model, @@ -15812,32 +15887,30 @@ legendCmpt, legendType }; for (const part of ['labels', 'legend', 'title', 'symbols', 'gradient', 'entries']) { - var _legendEncoding$part; - - const legendEncodingPart = guideEncodeEntry((_legendEncoding$part = legendEncoding[part]) !== null && _legendEncoding$part !== void 0 ? _legendEncoding$part : {}, model); + const legendEncodingPart = guideEncodeEntry(legendEncoding[part] ?? {}, model); const value = part in legendEncodeRules ? legendEncodeRules[part](legendEncodingPart, legendEncodeParams) // apply rule : legendEncodingPart; // no rule -- just default values if (value !== undefined && !isEmpty(value)) { - legendEncode[part] = { ...((selections === null || selections === void 0 ? void 0 : selections.length) && isFieldDef(fieldOrDatumDef) ? 
{ - name: "".concat(varName(fieldOrDatumDef.field), "_legend_").concat(part) + legendEncode[part] = { ...(selections !== null && selections !== void 0 && selections.length && isFieldDef(fieldOrDatumDef) ? { + name: `${varName(fieldOrDatumDef.field)}_legend_${part}` } : {}), - ...((selections === null || selections === void 0 ? void 0 : selections.length) ? { + ...(selections !== null && selections !== void 0 && selections.length ? { interactive: !!selections } : {}), update: value }; } } if (!isEmpty(legendEncode)) { var _legend2; - legendCmpt.set('encode', legendEncode, !!((_legend2 = legend) === null || _legend2 === void 0 ? void 0 : _legend2.encoding)); + legendCmpt.set('encode', legendEncode, !!((_legend2 = legend) !== null && _legend2 !== void 0 && _legend2.encoding)); } return legendCmpt; } @@ -15923,15 +15996,15 @@ } if (typeMerged) { var _mergedLegend$implici, _mergedLegend$implici2, _mergedLegend$explici, _mergedLegend$explici2; - if ((_mergedLegend$implici = mergedLegend.implicit) === null || _mergedLegend$implici === void 0 ? void 0 : (_mergedLegend$implici2 = _mergedLegend$implici.encode) === null || _mergedLegend$implici2 === void 0 ? void 0 : _mergedLegend$implici2.gradient) { + if ((_mergedLegend$implici = mergedLegend.implicit) !== null && _mergedLegend$implici !== void 0 && (_mergedLegend$implici2 = _mergedLegend$implici.encode) !== null && _mergedLegend$implici2 !== void 0 && _mergedLegend$implici2.gradient) { deleteNestedProperty(mergedLegend.implicit, ['encode', 'gradient']); } - if ((_mergedLegend$explici = mergedLegend.explicit) === null || _mergedLegend$explici === void 0 ? void 0 : (_mergedLegend$explici2 = _mergedLegend$explici.encode) === null || _mergedLegend$explici2 === void 0 ? void 0 : _mergedLegend$explici2.gradient) { + if ((_mergedLegend$explici = mergedLegend.explicit) !== null && _mergedLegend$explici !== void 0 && (_mergedLegend$explici2 = _mergedLegend$explici.encode) !== null && _mergedLegend$explici2 !== void 0 && _mergedLegend$explici2.gradient) { deleteNestedProperty(mergedLegend.explicit, ['encode', 'gradient']); } } return mergedLegend; @@ -15945,15 +16018,15 @@ return st1; } function setLegendEncode(legend, part, vgProp, vgRef) { - var _legend$encode, _legend$encode$part, _legend$encode$part$u; + var _legend$encode, _legend$encode$part; - legend.encode = (_legend$encode = legend.encode) !== null && _legend$encode !== void 0 ? _legend$encode : {}; - legend.encode[part] = (_legend$encode$part = legend.encode[part]) !== null && _legend$encode$part !== void 0 ? _legend$encode$part : {}; - legend.encode[part].update = (_legend$encode$part$u = legend.encode[part].update) !== null && _legend$encode$part$u !== void 0 ? _legend$encode$part$u : {}; // TODO: remove as any after https://github.com/prisma/nexus-prisma/issues/291 + legend.encode ?? (legend.encode = {}); + (_legend$encode = legend.encode)[part] ?? (_legend$encode[part] = {}); + (_legend$encode$part = legend.encode[part]).update ?? (_legend$encode$part.update = {}); // TODO: remove as any after https://github.com/prisma/nexus-prisma/issues/291 legend.encode[part].update[vgProp] = vgRef; } function assembleLegends(model) { @@ -15997,11 +16070,11 @@ if (config.aria === false && legend.aria == undefined) { legend.aria = false; } - if ((_legend$encode2 = legend.encode) === null || _legend$encode2 === void 0 ? 
void 0 : _legend$encode2.symbols) { + if ((_legend$encode2 = legend.encode) !== null && _legend$encode2 !== void 0 && _legend$encode2.symbols) { const out = legend.encode.symbols.update; if (out.fill && out.fill['value'] !== 'transparent' && !out.stroke && !legend.stroke) { // For non color channel's legend, we need to override symbol stroke config from Vega config if stroke channel is not used. out.stroke = { @@ -16025,11 +16098,11 @@ if (labelExpr !== undefined) { var _legend$encode3, _legend$encode3$label; let expr = labelExpr; - if (((_legend$encode3 = legend.encode) === null || _legend$encode3 === void 0 ? void 0 : (_legend$encode3$label = _legend$encode3.labels) === null || _legend$encode3$label === void 0 ? void 0 : _legend$encode3$label.update) && isSignalRef(legend.encode.labels.update.text)) { + if ((_legend$encode3 = legend.encode) !== null && _legend$encode3 !== void 0 && (_legend$encode3$label = _legend$encode3.labels) !== null && _legend$encode3$label !== void 0 && _legend$encode3$label.update && isSignalRef(legend.encode.labels.update.text)) { expr = replaceAll(labelExpr, 'datum.label', legend.encode.labels.update.text.signal); } setLegendEncode(legend, 'labels', 'text', { signal: expr @@ -16077,14 +16150,14 @@ ...projection }]; } else { // generate projection that uses extent fitting const size = { - signal: "[".concat(component.size.map(ref => ref.signal).join(', '), "]") + signal: `[${component.size.map(ref => ref.signal).join(', ')}]` }; const fits = component.data.reduce((sources, data) => { - const source = isSignalRef(data) ? data.signal : "data('".concat(model.lookupDataSource(data), "')"); + const source = isSignalRef(data) ? data.signal : `data('${model.lookupDataSource(data)}')`; if (!contains(sources, source)) { // build a unique list of sources sources.push(source); } @@ -16098,11 +16171,11 @@ return [{ name, size, fit: { - signal: fits.length > 1 ? "[".concat(fits.join(', '), "]") : fits[0] + signal: fits.length > 1 ? `[${fits.join(', ')}]` : fits[0] }, ...projection }]; } } @@ -16141,19 +16214,23 @@ model.component.projection = isUnitModel(model) ? parseUnitProjection(model) : parseNonUnitProjections(model); } function parseUnitProjection(model) { if (model.hasProjection) { - var _model$config$project; - - const proj = model.specifiedProjection; + const proj = replaceExprRef(model.specifiedProjection); const fit = !(proj && (proj.scale != null || proj.translate != null)); const size = fit ? [model.getSizeSignalRef('width'), model.getSizeSignalRef('height')] : undefined; const data = fit ? gatherFitData(model) : undefined; - return new ProjectionComponent(model.projectionName(true), { ...((_model$config$project = model.config.projection) !== null && _model$config$project !== void 0 ? _model$config$project : {}), - ...(proj !== null && proj !== void 0 ? proj : {}) + const projComp = new ProjectionComponent(model.projectionName(true), { ...(replaceExprRef(model.config.projection) ?? {}), + ...(proj ?? 
{}) }, size, data); + + if (!projComp.get('type')) { + projComp.set('type', 'equalEarth', false); + } + + return projComp; } return undefined; } @@ -16164,18 +16241,18 @@ } = model; for (const posssiblePair of [[LONGITUDE, LATITUDE], [LONGITUDE2, LATITUDE2]]) { if (getFieldOrDatumDef(encoding[posssiblePair[0]]) || getFieldOrDatumDef(encoding[posssiblePair[1]])) { data.push({ - signal: model.getName("geojson_".concat(data.length)) + signal: model.getName(`geojson_${data.length}`) }); } } if (model.channelHasField(SHAPE) && model.typedFieldDef(SHAPE).type === GEOJSON) { data.push({ - signal: model.getName("geojson_".concat(data.length)) + signal: model.getName(`geojson_${data.length}`) }); } if (data.length === 0) { // main source is geojson, so we can just use that @@ -16186,30 +16263,30 @@ } function mergeIfNoConflict(first, second) { const allPropertiesShared = every(PROJECTION_PROPERTIES, prop => { // neither has the property - if (!has(first.explicit, prop) && !has(second.explicit, prop)) { + if (!vegaUtil.hasOwnProperty(first.explicit, prop) && !vegaUtil.hasOwnProperty(second.explicit, prop)) { return true; } // both have property and an equal value for property - if (has(first.explicit, prop) && has(second.explicit, prop) && // some properties might be signals or objects and require hashing for comparison - stringify(first.get(prop)) === stringify(second.get(prop))) { + if (vegaUtil.hasOwnProperty(first.explicit, prop) && vegaUtil.hasOwnProperty(second.explicit, prop) && // some properties might be signals or objects and require hashing for comparison + deepEqual(first.get(prop), second.get(prop))) { return true; } return false; }); - const size = stringify(first.size) === stringify(second.size); + const size = deepEqual(first.size, second.size); if (size) { if (allPropertiesShared) { return first; - } else if (stringify(first.explicit) === stringify({})) { + } else if (deepEqual(first.explicit, {})) { return second; - } else if (stringify(second.explicit) === stringify({})) { + } else if (deepEqual(second.explicit, {})) { return first; } } // if all properties don't match, let each unit spec have its own projection @@ -16273,14 +16350,12 @@ return undefined; } function rangeFormula(model, fieldDef, channel, config) { if (binRequiresRange(fieldDef, channel)) { - var _ref, _model$axis; - // read format from axis or legend, if there is no format then use config.numberFormat - const guide = isUnitModel(model) ? (_ref = (_model$axis = model.axis(channel)) !== null && _model$axis !== void 0 ? _model$axis : model.legend(channel)) !== null && _ref !== void 0 ? _ref : {} : {}; + const guide = isUnitModel(model) ? model.axis(channel) ?? model.legend(channel) ?? {} : {}; const startField = vgField(fieldDef, { expr: 'datum' }); const endField = vgField(fieldDef, { expr: 'datum', @@ -16297,26 +16372,24 @@ return {}; } function binKey(bin, field) { - return "".concat(binToString(bin), "_").concat(field); + return `${binToString(bin)}_${field}`; } function getSignalsFromModel(model, key) { return { - signal: model.getName("".concat(key, "_bins")), - extentSignal: model.getName("".concat(key, "_extent")) + signal: model.getName(`${key}_bins`), + extentSignal: model.getName(`${key}_extent`) }; } function getBinSignalName(model, field, bin) { - var _normalizeBin; - - const normalizedBin = (_normalizeBin = normalizeBin(bin, undefined)) !== null && _normalizeBin !== void 0 ? _normalizeBin : {}; + const normalizedBin = normalizeBin(bin, undefined) ?? 
{}; const key = binKey(normalizedBin, field); - return model.getName("".concat(key, "_bins")); + return model.getName(`${key}_bins`); } function isBinTransform(t) { return 'as' in t; } @@ -16324,11 +16397,11 @@ function createBinComponent(t, bin, model) { let as; let span; if (isBinTransform(t)) { - as = isString(t.as) ? [t.as, "".concat(t.as, "_end")] : [t.as[0], t.as[1]]; + as = vegaUtil.isString(t.as) ? [t.as, `${t.as}_end`] : [t.as[0], t.as[1]]; } else { as = [vgField(t, { forAs: true }), vgField(t, { binSuffix: 'end', @@ -16342,14 +16415,13 @@ const { signal, extentSignal } = getSignalsFromModel(model, key); - if (isSelectionExtent(normalizedBin.extent)) { + if (isParameterExtent(normalizedBin.extent)) { const ext = normalizedBin.extent; - const selName = ext.selection; - span = parseSelectionBinExtent(model.getSelectionComponent(varName(selName), selName), ext); + span = parseSelectionExtent(model, ext.param, ext); delete normalizedBin.extent; // Vega-Lite selection extent map to Vega's span property. } const binComponent = { bin: normalizedBin, @@ -16450,11 +16522,11 @@ dependentFields() { return new Set(vals(this.bins).map(c => c.field)); } hash() { - return "Bin ".concat(hash(this.bins)); + return `Bin ${hash(this.bins)}`; } assemble() { return vals(this.bins).flatMap(bin => { const transform = []; @@ -16466,18 +16538,18 @@ const binTrans = { type: 'bin', field: replacePathInField(bin.field), as: binAs, signal: bin.signal, - ...(!isSelectionExtent(extent) ? { + ...(!isParameterExtent(extent) ? { extent } : { extent: null }), ...(bin.span ? { span: { - signal: "span(".concat(bin.span, ")") + signal: `span(${bin.span})` } } : {}), ...params }; @@ -16521,13 +16593,15 @@ } } function addDimension(dims, channel, fieldDef, model) { + var _fieldDef$scale; + const channelDef2 = isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined; - if (isTypedFieldDef(fieldDef) && isUnitModel(model) && hasBand(channel, fieldDef, channelDef2, model.stack, model.markDef, model.config)) { + if (isTypedFieldDef(fieldDef) && isUnitModel(model) && hasBandEnd(fieldDef, channelDef2, model.markDef, model.config)) { dims.add(vgField(fieldDef, {})); dims.add(vgField(fieldDef, { suffix: 'end' })); @@ -16541,24 +16615,26 @@ dims.add(model.getName(posChannel)); } else { dims.add(vgField(fieldDef)); } + if (isScaleFieldDef(fieldDef) && isFieldRange((_fieldDef$scale = fieldDef.scale) === null || _fieldDef$scale === void 0 ? void 0 : _fieldDef$scale.range)) { + dims.add(fieldDef.scale.range.field); + } + return dims; } function mergeMeasures(parentMeasures, childMeasures) { for (const field of keys(childMeasures)) { // when we merge a measure, we either have to add an aggregation operator or even a new field const ops = childMeasures[field]; for (const op of keys(ops)) { if (field in parentMeasures) { - var _parentMeasures$field; - // add operator to existing measure field - parentMeasures[field][op] = new Set([...((_parentMeasures$field = parentMeasures[field][op]) !== null && _parentMeasures$field !== void 0 ? _parentMeasures$field : []), ...ops[op]]); + parentMeasures[field][op] = new Set([...(parentMeasures[field][op] ?? []), ...ops[op]]); } else { parentMeasures[field] = { [op]: ops[op] }; } @@ -16607,43 +16683,37 @@ field } = fieldDef; if (aggregate) { if (aggregate === 'count') { - var _meas$; + var _; - meas['*'] = (_meas$ = meas['*']) !== null && _meas$ !== void 0 ? _meas$ : {}; + meas[_ = '*'] ?? 
(meas[_] = {}); meas['*']['count'] = new Set([vgField(fieldDef, { forAs: true })]); } else { if (isArgminDef(aggregate) || isArgmaxDef(aggregate)) { - var _meas$argField; - const op = isArgminDef(aggregate) ? 'argmin' : 'argmax'; const argField = aggregate[op]; - meas[argField] = (_meas$argField = meas[argField]) !== null && _meas$argField !== void 0 ? _meas$argField : {}; + meas[argField] ?? (meas[argField] = {}); meas[argField][op] = new Set([vgField({ op, field: argField }, { forAs: true })]); } else { - var _meas$field; - - meas[field] = (_meas$field = meas[field]) !== null && _meas$field !== void 0 ? _meas$field : {}; + meas[field] ?? (meas[field] = {}); meas[field][aggregate] = new Set([vgField(fieldDef, { forAs: true })]); } // For scale channel with domain === 'unaggregated', add min/max so we can use their union as unaggregated domain if (isScaleChannel(channel) && model.scaleDomain(channel) === 'unaggregated') { - var _meas$field2; - - meas[field] = (_meas$field2 = meas[field]) !== null && _meas$field2 !== void 0 ? _meas$field2 : {}; + meas[field] ?? (meas[field] = {}); meas[field]['min'] = new Set([vgField({ field, aggregate: 'min' }, { forAs: true @@ -16679,30 +16749,26 @@ as } = s; if (op) { if (op === 'count') { - var _meas$2; + var _2; - meas['*'] = (_meas$2 = meas['*']) !== null && _meas$2 !== void 0 ? _meas$2 : {}; + meas[_2 = '*'] ?? (meas[_2] = {}); meas['*']['count'] = new Set([as ? as : vgField(s, { forAs: true })]); } else { - var _meas$field3; - - meas[field] = (_meas$field3 = meas[field]) !== null && _meas$field3 !== void 0 ? _meas$field3 : {}; + meas[field] ?? (meas[field] = {}); meas[field][op] = new Set([as ? as : vgField(s, { forAs: true })]); } } } - for (const s of (_t$groupby = t.groupby) !== null && _t$groupby !== void 0 ? _t$groupby : []) { - var _t$groupby; - + for (const s of t.groupby ?? []) { dims.add(s); } if (dims.size + keys(meas).length === 0) { return null; @@ -16735,25 +16801,25 @@ for (const field of keys(this.measures)) { for (const op of keys(this.measures[field])) { const m = this.measures[field][op]; if (m.size === 0) { - out.add("".concat(op, "_").concat(field)); + out.add(`${op}_${field}`); } else { m.forEach(out.add, out); } } } return out; } hash() { - return "Aggregate ".concat(hash({ - dimensions: this.dimensions, - measures: this.measures - })); + return `Aggregate ${hash({ + dimensions: this.dimensions, + measures: this.measures + })}`; } assemble() { const ops = []; const fields = []; @@ -16811,32 +16877,32 @@ const { bin, sort } = fieldDef; this[channel] = { - name: model.getName("".concat(channel, "_domain")), + name: model.getName(`${channel}_domain`), fields: [vgField(fieldDef), ...(isBinning(bin) ? [vgField(fieldDef, { binSuffix: 'end' })] : [])], ...(isSortField(sort) ? { sortField: sort - } : isArray(sort) ? { + } : vegaUtil.isArray(sort) ? { sortIndexField: sortArrayIndexField(fieldDef, channel) } : {}) }; } } this.childModel = model.child; } hash() { - let out = "Facet"; + let out = `Facet`; for (const channel of FACET_CHANNELS) { if (this[channel]) { - out += " ".concat(channel.charAt(0), ":").concat(hash(this[channel])); + out += ` ${channel.charAt(0)}:${hash(this[channel])}`; } } return out; } @@ -16845,11 +16911,11 @@ const f = []; for (const channel of FACET_CHANNELS) { var _this$channel; - if ((_this$channel = this[channel]) === null || _this$channel === void 0 ? 
void 0 : _this$channel.fields) { + if ((_this$channel = this[channel]) !== null && _this$channel !== void 0 && _this$channel.fields) { f.push(...this[channel].fields); } } return f; @@ -16913,29 +16979,30 @@ } assembleRowColumnHeaderData(channel, crossedDataName, childIndependentFieldsWithStep) { const childChannel = { row: 'y', - column: 'x' + column: 'x', + facet: undefined }[channel]; const fields = []; const ops = []; const as = []; - if (childIndependentFieldsWithStep && childIndependentFieldsWithStep[childChannel]) { + if (childChannel && childIndependentFieldsWithStep && childIndependentFieldsWithStep[childChannel]) { if (crossedDataName) { // If there is a crossed data, calculate max - fields.push("distinct_".concat(childIndependentFieldsWithStep[childChannel])); + fields.push(`distinct_${childIndependentFieldsWithStep[childChannel]}`); ops.push('max'); } else { // If there is no crossed data, just calculate distinct fields.push(childIndependentFieldsWithStep[childChannel]); ops.push('distinct'); } // Although it is technically a max, just name it distinct so it's easier to refer to it - as.push("distinct_".concat(childIndependentFieldsWithStep[childChannel])); + as.push(`distinct_${childIndependentFieldsWithStep[childChannel]}`); } const { sortField, sortIndexField @@ -16958,11 +17025,11 @@ } return { name: this[channel].name, // Use data from the crossed one if it exist - source: crossedDataName !== null && crossedDataName !== void 0 ? crossedDataName : this.data, + source: crossedDataName ?? this.data, transform: [{ type: 'aggregate', groupby: this[channel].fields, ...(fields.length ? { fields, @@ -16983,14 +17050,12 @@ const data = []; const hasSharedAxis = {}; for (const headerChannel of HEADER_CHANNELS) { for (const headerType of HEADER_TYPES) { - var _ref; + const headers = (layoutHeaders[headerChannel] && layoutHeaders[headerChannel][headerType]) ?? []; - const headers = (_ref = layoutHeaders[headerChannel] && layoutHeaders[headerChannel][headerType]) !== null && _ref !== void 0 ? _ref : []; - for (const header of headers) { var _header$axes; if (((_header$axes = header.axes) === null || _header$axes === void 0 ? void 0 : _header$axes.length) > 0) { hasSharedAxis[headerChannel] = true; @@ -16998,20 +17063,20 @@ } } } if (hasSharedAxis[headerChannel]) { - const cardinality = "length(data(\"".concat(this.facet.name, "\"))"); + const cardinality = `length(data("${this.facet.name}"))`; const stop = headerChannel === 'row' ? columns ? { - signal: "ceil(".concat(cardinality, " / ").concat(columns, ")") + signal: `ceil(${cardinality} / ${columns})` } : 1 : columns ? { - signal: "min(".concat(cardinality, ", ").concat(columns, ")") + signal: `min(${cardinality}, ${columns})` } : { signal: cardinality }; data.push({ - name: "".concat(this.facet.name, "_").concat(headerChannel), + name: `${this.facet.name}_${headerChannel}`, transform: [{ type: 'sequence', start: 0, stop }] @@ -17040,15 +17105,13 @@ row, facet } = this; if (column && row && (childIndependentFieldsWithStep.x || childIndependentFieldsWithStep.y)) { - var _childIndependentFiel, _childIndependentFiel2; - // Need to create a cross dataset to correctly calculate cardinality - crossedDataName = "cross_".concat(this.column.name, "_").concat(this.row.name); - const fields = [].concat((_childIndependentFiel = childIndependentFieldsWithStep.x) !== null && _childIndependentFiel !== void 0 ? _childIndependentFiel : [], (_childIndependentFiel2 = childIndependentFieldsWithStep.y) !== null && _childIndependentFiel2 !== void 0 ? 
_childIndependentFiel2 : []); + crossedDataName = `cross_${this.column.name}_${this.row.name}`; + const fields = [].concat(childIndependentFieldsWithStep.x ?? [], childIndependentFieldsWithStep.y ?? []); const ops = fields.map(() => 'distinct'); data.push({ name: crossedDataName, source: this.data, transform: [{ @@ -17094,29 +17157,29 @@ * @param field The field. * @param parse What to parse the field as. */ - function parseExpression$1(field, parse) { + function parseExpression(field, parse) { const f = accessPathWithDatum(field); if (parse === 'number') { - return "toNumber(".concat(f, ")"); + return `toNumber(${f})`; } else if (parse === 'boolean') { - return "toBoolean(".concat(f, ")"); + return `toBoolean(${f})`; } else if (parse === 'string') { - return "toString(".concat(f, ")"); + return `toString(${f})`; } else if (parse === 'date') { - return "toDate(".concat(f, ")"); + return `toDate(${f})`; } else if (parse === 'flatten') { return f; - } else if (parse.indexOf('date:') === 0) { + } else if (parse.startsWith('date:')) { const specifier = unquote(parse.slice(5, parse.length)); - return "timeParse(".concat(f, ",'").concat(specifier, "')"); - } else if (parse.indexOf('utc:') === 0) { + return `timeParse(${f},'${specifier}')`; + } else if (parse.startsWith('utc:')) { const specifier = unquote(parse.slice(4, parse.length)); - return "utcParse(".concat(f, ",'").concat(specifier, "')"); + return `utcParse(${f},'${specifier}')`; } else { warn(unrecognizedParse(parse)); return null; } } @@ -17141,22 +17204,20 @@ } else if (isFieldGTEPredicate(filter)) { val = signalRefOrValue(filter.gte); } else if (isFieldRangePredicate(filter)) { val = filter.range[0]; } else if (isFieldOneOfPredicate(filter)) { - var _filter$oneOf; - - val = ((_filter$oneOf = filter.oneOf) !== null && _filter$oneOf !== void 0 ? _filter$oneOf : filter['in'])[0]; + val = (filter.oneOf ?? filter['in'])[0]; } // else -- for filter expression, we can't infer anything if (val) { if (isDateTime(val)) { implicit[filter.field] = 'date'; - } else if (isNumber(val)) { + } else if (vegaUtil.isNumber(val)) { implicit[filter.field] = 'number'; - } else if (isString(val)) { + } else if (vegaUtil.isString(val)) { implicit[filter.field] = 'string'; } } if (filter.timeUnit) { @@ -17176,12 +17237,12 @@ function add(fieldDef) { if (isFieldOrDatumDefForTimeFormat(fieldDef)) { implicit[fieldDef.field] = 'date'; } else if (fieldDef.type === 'quantitative' && isMinMaxOp(fieldDef.aggregate) // we need to parse numbers to support correct min and max ) { - implicit[fieldDef.field] = 'number'; - } else if (accessPathDepth(fieldDef.field) > 1) { + implicit[fieldDef.field] = 'number'; + } else if (accessPathDepth(fieldDef.field) > 1) { // For non-date/non-number (strings and booleans), derive a flattened field for a referenced nested field. // (Parsing numbers / dates already flattens numeric and temporal fields.) if (!(fieldDef.field in implicit)) { implicit[fieldDef.field] = 'flatten'; } @@ -17262,23 +17323,25 @@ this._parse = parse; } hash() { - return "Parse ".concat(hash(this._parse)); + return `Parse ${hash(this._parse)}`; } /** * Creates a parse node from a data.format.parse and updates ancestorParse. 
*/ static makeExplicit(parent, model, ancestorParse) { + var _data$format; + // Custom parse let explicit = {}; const data = model.data; - if (!isGenerator(data) && data && data.format && data.format.parse) { + if (!isGenerator(data) && data !== null && data !== void 0 && (_data$format = data.format) !== null && _data$format !== void 0 && _data$format.parse) { explicit = data.format.parse; } return this.makeWithAncestors(parent, explicit, {}, ancestorParse); } @@ -17374,11 +17437,11 @@ return new Set(keys(this._parse)); } assembleTransforms(onlyNested = false) { return keys(this._parse).filter(field => onlyNested ? accessPathDepth(field) > 1 : true).map(field => { - const expr = parseExpression$1(field, this._parse[field]); + const expr = parseExpression(field, this._parse[field]); if (!expr) { return null; } @@ -17441,11 +17504,11 @@ producedFields() { return undefined; // there should never be a node before graticule } hash() { - return "Graticule ".concat(hash(this.params)); + return `Graticule ${hash(this.params)}`; } assemble() { return { type: 'graticule', @@ -17468,17 +17531,15 @@ dependentFields() { return new Set(); } producedFields() { - var _this$params$as; - - return new Set([(_this$params$as = this.params.as) !== null && _this$params$as !== void 0 ? _this$params$as : 'data']); + return new Set([this.params.as ?? 'data']); } hash() { - return "Hash ".concat(hash(this.params)); + return `Hash ${hash(this.params)}`; } assemble() { return { type: 'sequence', @@ -17488,23 +17549,21 @@ } class SourceNode extends DataFlowNode { constructor(data) { - var _data; - super(null); // source cannot have parent _defineProperty(this, "_data", void 0); _defineProperty(this, "_name", void 0); _defineProperty(this, "_generator", void 0); - data = (_data = data) !== null && _data !== void 0 ? _data : { + data ?? (data = { name: 'source' - }; + }); let format; if (!isGenerator(data)) { format = data.format ? { ...omit(data.format, ['parse']) } : {}; @@ -17614,11 +17673,11 @@ /** * Abstract base class for Dataflow optimizers. * Contains only mutation handling logic. Subclasses need to implement iteration logic. */ - var _modified = new WeakMap(); + var _modified = /*#__PURE__*/new WeakMap(); class Optimizer { constructor() { _modified.set(this, { writable: true, @@ -18132,17 +18191,15 @@ producedFields() { return new Set(this.transform.joinaggregate.map(this.getDefaultName)); } getDefaultName(joinAggregateFieldDef) { - var _joinAggregateFieldDe; - - return (_joinAggregateFieldDe = joinAggregateFieldDef.as) !== null && _joinAggregateFieldDe !== void 0 ? _joinAggregateFieldDe : vgField(joinAggregateFieldDef); + return joinAggregateFieldDef.as ?? 
vgField(joinAggregateFieldDef); } hash() { - return "JoinAggregateTransform ".concat(hash(this.transform)); + return `JoinAggregateTransform ${hash(this.transform)}`; } assemble() { const fields = []; const ops = []; @@ -18181,11 +18238,11 @@ return fields; }, []); } function isValidAsArray(as) { - return isArray(as) && as.every(s => isString(s)) && as.length > 1; + return vegaUtil.isArray(as) && as.every(s => vegaUtil.isString(s)) && as.length > 1; } class StackNode extends DataFlowNode { clone() { return new StackNode(null, duplicate(this._stack)); @@ -18222,14 +18279,14 @@ }; let normalizedAs; if (isValidAsArray(as)) { normalizedAs = as; - } else if (isString(as)) { - normalizedAs = [as, as + '_end']; + } else if (vegaUtil.isString(as)) { + normalizedAs = [as, `${as}_end`]; } else { - normalizedAs = [stackTransform.stack + '_start', stackTransform.stack + '_end']; + normalizedAs = [`${stackTransform.stack}_start`, `${stackTransform.stack}_end`]; } return new StackNode(parent, { stackField: stack, groupby, @@ -18265,11 +18322,11 @@ const stackby = getStackByFields(model); const orderDef = model.encoding.order; let sort; - if (isArray(orderDef) || isFieldDef(orderDef)) { + if (vegaUtil.isArray(orderDef) || isFieldDef(orderDef)) { sort = sortParams(orderDef); } else { // default = descending by stackFields // FIXME is the default here correct for binned fields? sort = stackby.reduce((s, field) => { @@ -18323,11 +18380,11 @@ producedFields() { return new Set(this._stack.as); } hash() { - return "Stack ".concat(hash(this._stack)); + return `Stack ${hash(this._stack)}`; } getGroupbyFields() { const { dimensionFieldDef, @@ -18352,11 +18409,11 @@ } return [vgField(dimensionFieldDef)]; } - return groupby !== null && groupby !== void 0 ? groupby : []; + return groupby ?? []; } assemble() { const transform = []; const { @@ -18370,22 +18427,22 @@ as } = this._stack; // Impute if (impute && dimensionFieldDef) { const { - band = 0.5, + bandPosition = 0.5, bin } = dimensionFieldDef; if (bin) { // As we can only impute one field at a time, we need to calculate // mid point for a binned field transform.push({ type: 'formula', - expr: "".concat(band, "*") + vgField(dimensionFieldDef, { + expr: `${bandPosition}*` + vgField(dimensionFieldDef, { expr: 'datum' - }) + "+".concat(1 - band, "*") + vgField(dimensionFieldDef, { + }) + `+${1 - bandPosition}*` + vgField(dimensionFieldDef, { expr: 'datum', binSuffix: 'end' }), as: vgField(dimensionFieldDef, { binSuffix: 'mid', @@ -18437,31 +18494,27 @@ addDimensions(fields) { this.transform.groupby = unique(this.transform.groupby.concat(fields), d => d); } dependentFields() { - var _this$transform$group, _this$transform$sort; - const out = new Set(); - ((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : []).forEach(out.add, out); - ((_this$transform$sort = this.transform.sort) !== null && _this$transform$sort !== void 0 ? _this$transform$sort : []).forEach(m => out.add(m.field)); + (this.transform.groupby ?? []).forEach(out.add, out); + (this.transform.sort ?? []).forEach(m => out.add(m.field)); this.transform.window.map(w => w.field).filter(f => f !== undefined).forEach(out.add, out); return out; } producedFields() { return new Set(this.transform.window.map(this.getDefaultName)); } getDefaultName(windowFieldDef) { - var _windowFieldDef$as; - - return (_windowFieldDef$as = windowFieldDef.as) !== null && _windowFieldDef$as !== void 0 ? 
_windowFieldDef$as : vgField(windowFieldDef); + return windowFieldDef.as ?? vgField(windowFieldDef); } hash() { - return "WindowTransform ".concat(hash(this.transform)); + return `WindowTransform ${hash(this.transform)}`; } assemble() { const fields = []; const ops = []; @@ -18494,14 +18547,12 @@ const sortFields = []; const sortOrder = []; if (this.transform.sort !== undefined) { for (const sortField of this.transform.sort) { - var _sortField$order; - sortFields.push(sortField.field); - sortOrder.push((_sortField$order = sortField.order) !== null && _sortField$order !== void 0 ? _sortField$order : 'ascending'); + sortOrder.push(sortField.order ?? 'ascending'); } } const sort = { field: sortFields, @@ -18703,11 +18754,11 @@ checkLinks(data.sources); if (Math.max(firstPassCounter, secondPassCounter) === MAX_OPTIMIZATION_RUNS) { - warn("Maximum optimization runs(".concat(MAX_OPTIMIZATION_RUNS, ") reached.")); + warn(`Maximum optimization runs(${MAX_OPTIMIZATION_RUNS}) reached.`); } } /** * A class that behaves like a SignalRef but lazily generates the signal. @@ -18791,11 +18842,11 @@ domains = mergeValuesWithExplicit(domains, childComponent.getWithExplicit('domains'), 'domains', 'scale', domainsTieBreaker); } const se = childComponent.get('selectionExtent'); - if (selectionExtent && se && selectionExtent.selection !== se.selection) { + if (selectionExtent && se && selectionExtent.param !== se.param) { warn(NEEDS_SAME_SELECTION); } selectionExtent = se; } @@ -18875,11 +18926,11 @@ const data = valueExpr(v, { timeUnit, type }); return { - signal: "{data: ".concat(data, "}") + signal: `{data: ${data}}` }; }); } function convertDomainIfItIsDateTime(domain, type, timeUnit) { @@ -18909,11 +18960,11 @@ const defaultDomain = parseSingleChannelDomain(scaleType, undefined, model, channel); const unionWith = convertDomainIfItIsDateTime(domain.unionWith, type, timeUnit); return makeExplicit([...defaultDomain.value, ...unionWith]); } else if (isSignalRef(domain)) { return makeExplicit([domain]); - } else if (domain && domain !== 'unaggregated' && !isSelectionDomain(domain)) { + } else if (domain && domain !== 'unaggregated' && !isParameterDomain(domain)) { return makeExplicit(convertDomainIfItIsDateTime(domain, type, timeUnit)); } const stack = model.stack; @@ -18973,17 +19024,17 @@ return makeImplicit([{ // If sort by aggregation of a specified sort field, we need to use RAW table, // so we can aggregate values for the scale independently from the main aggregation. - data: isBoolean$1(sort) ? model.requestDataName(DataSourceType.Main) : model.requestDataName(DataSourceType.Raw), + data: isBoolean(sort) ? model.requestDataName(DataSourceType.Main) : model.requestDataName(DataSourceType.Raw), // Use range if we added it and the scale does not support computing a range as a signal. field: model.vgField(channel, binRequiresRange(fieldDef, channel) ? { binSuffix: 'range' } : {}), // we have to use a sort object if sort = true to make the sort correct by bin start - sort: sort === true || !isObject(sort) ? { + sort: sort === true || !vegaUtil.isObject(sort) ? 
{ field: model.vgField(channel, {}), op: 'min' // min or max doesn't matter since we sort by the start of the bin range } : sort }]); @@ -18995,20 +19046,20 @@ if (isBinning(bin)) { const binSignal = getBinSignalName(model, fieldDef.field, bin); return makeImplicit([new SignalRefWrapper(() => { const signal = model.getSignalName(binSignal); - return "[".concat(signal, ".start, ").concat(signal, ".stop]"); + return `[${signal}.start, ${signal}.stop]`; })]); } else { return makeImplicit([{ data: model.requestDataName(DataSourceType.Main), field: model.vgField(channel, {}) }]); } } - } else if (fieldDef.timeUnit && contains(['time', 'utc'], scaleType) && hasBand(channel, fieldDef, isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined, model.stack, model.markDef, model.config)) { + } else if (fieldDef.timeUnit && contains(['time', 'utc'], scaleType) && hasBandEnd(fieldDef, isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined, model.markDef, model.config)) { const data = model.requestDataName(DataSourceType.Main); return makeImplicit([{ data, field: model.vgField(channel) }, { @@ -19019,11 +19070,11 @@ }]); } else if (sort) { return makeImplicit([{ // If sort by aggregation of a specified sort field, we need to use RAW table, // so we can aggregate values for the scale independently from the main aggregation. - data: isBoolean$1(sort) ? model.requestDataName(DataSourceType.Main) : model.requestDataName(DataSourceType.Raw), + data: isBoolean(sort) ? model.requestDataName(DataSourceType.Main) : model.requestDataName(DataSourceType.Raw), field: model.vgField(channel), sort: sort }]); } else { return makeImplicit([{ @@ -19039,11 +19090,11 @@ field, order } = sort; return { // Apply default op - op: op !== null && op !== void 0 ? op : isStackedMeasure ? 'sum' : DEFAULT_SORT_OP, + op: op ?? (isStackedMeasure ? 'sum' : DEFAULT_SORT_OP), // flatten nested fields ...(field ? { field: replacePathInField(field) } : {}), ...(order ? { @@ -19056,18 +19107,18 @@ var _model$fieldDef; const scale = model.component.scales[channel]; const spec = model.specifiedScales[channel].domain; const bin = (_model$fieldDef = model.fieldDef(channel)) === null || _model$fieldDef === void 0 ? void 0 : _model$fieldDef.bin; - const domain = isSelectionDomain(spec) && spec; - const extent = isBinParams(bin) && isSelectionExtent(bin.extent) && bin.extent; + const domain = isParameterDomain(spec) && spec; + const extent = isBinParams(bin) && isParameterExtent(bin.extent) && bin.extent; if (domain || extent) { // As scale parsing occurs before selection parsing, we cannot set // domainRaw directly. So instead, we store the selectionExtent on // the scale component, and then add domainRaw during scale assembly. - scale.set('selectionExtent', domain !== null && domain !== void 0 ? domain : extent, true); + scale.set('selectionExtent', domain ?? 
extent, true); } } function domainSort(model, channel, scaleType) { if (!hasDiscreteDomain(scaleType)) { @@ -19153,11 +19204,11 @@ valid: false, reason: unaggregateDomainHasNoEffectForRawField(fieldDef) }; } - if (isString(aggregate) && !SHARED_DOMAIN_OP_INDEX[aggregate]) { + if (vegaUtil.isString(aggregate) && !SHARED_DOMAIN_OP_INDEX[aggregate]) { return { valid: false, reason: unaggregateDomainWithNonSharedDomainOp(aggregate) }; } @@ -19210,11 +19261,11 @@ }), hash); const sorts = unique(domains.map(d => { if (isDataRefDomain(d)) { const s = d.sort; - if (s !== undefined && !isBoolean$1(s)) { + if (s !== undefined && !isBoolean(s)) { if ('op' in s && s.op === 'count') { // let's make sure that if op is count, we don't use a field delete s.field; } @@ -19241,11 +19292,11 @@ if (sorts.length > 1) { warn(MORE_THAN_ONE_SORT); sort = true; } else { // Simplify domain sort by removing field and op when the field is the same as the domain field. - if (isObject(sort) && 'field' in sort) { + if (vegaUtil.isObject(sort) && 'field' in sort) { const sortField = sort.field; if (domain.field === sortField) { sort = sort.order ? { order: sort.order @@ -19262,11 +19313,11 @@ return domain; } // only keep sort properties that work with unioned domains const unionDomainSorts = unique(sorts.map(s => { - if (isBoolean$1(s) || !('op' in s) || isString(s.op) && s.op in MULTIDOMAIN_SORT_OP_INDEX) { + if (isBoolean(s) || !('op' in s) || vegaUtil.isString(s.op) && s.op in MULTIDOMAIN_SORT_OP_INDEX) { return s; } warn(domainSortDropped(s)); return true; @@ -19311,17 +19362,17 @@ * Return a field if a scale uses a single field. * Return `undefined` otherwise. */ function getFieldFromDomain(domain) { - if (isDataRefDomain(domain) && isString(domain.field)) { + if (isDataRefDomain(domain) && vegaUtil.isString(domain.field)) { return domain.field; } else if (isDataRefUnionedDomain(domain)) { let field; for (const nonUnionDomain of domain.fields) { - if (isDataRefDomain(nonUnionDomain) && isString(nonUnionDomain.field)) { + if (isDataRefDomain(nonUnionDomain) && vegaUtil.isString(nonUnionDomain.field)) { if (!field) { field = nonUnionDomain.field; } else if (field !== nonUnionDomain.field) { warn(FACETED_INDEPENDENT_DIFFERENT_SOURCES); return field; @@ -19332,11 +19383,11 @@ warn(FACETED_INDEPENDENT_SAME_FIELDS_DIFFERENT_SOURCES); return field; } else if (isFieldRefUnionDomain(domain)) { warn(FACETED_INDEPENDENT_SAME_SOURCE); const field = domain.fields[0]; - return isString(field) ? field : undefined; + return vegaUtil.isString(field) ? field : undefined; } return undefined; } function assembleDomain(model, channel) { @@ -19385,17 +19436,12 @@ range: _r, reverse, ...otherScaleProps } = scale; const range = assembleScaleRange(scale.range, name, channel, model); - let domainRaw; - - if (selectionExtent) { - domainRaw = assembleSelectionScaleDomain(model, selectionExtent); - } - const domain = assembleDomain(model, channel); + const domainRaw = selectionExtent ? assembleSelectionScaleDomain(model, selectionExtent, scaleComponent, domain) : null; scales.push({ name, type, ...(domain ? { domain @@ -19417,15 +19463,15 @@ if (isXorY(channel)) { if (isVgRangeStep(scaleRange)) { // For width/height step, use a signal created in layout assemble instead of a constant step. 
return { step: { - signal: scaleName + '_step' + signal: `${scaleName}_step` } }; } - } else if (isObject(scaleRange) && isDataRefDomain(scaleRange)) { + } else if (vegaUtil.isObject(scaleRange) && isDataRefDomain(scaleRange)) { return { ...scaleRange, data: model.lookupDataSource(scaleRange.data) }; } @@ -19457,18 +19503,18 @@ domainDefinitelyIncludesZero() { if (this.get('zero') !== false) { return true; } - return some(this.get('domains'), d => isArray(d) && d.length === 2 && d[0] <= 0 && d[1] >= 0); + return some(this.get('domains'), d => vegaUtil.isArray(d) && d.length === 2 && d[0] <= 0 && d[1] >= 0); } } const RANGE_PROPERTIES = ['range', 'scheme']; - function getSizeChannel$1(channel) { + function getSizeChannel(channel) { return channel === 'x' ? 'width' : channel === 'y' ? 'height' : undefined; } function parseUnitScaleRange(model) { const localScaleComponents = model.component.scales; // use SCALE_CHANNELS instead of scales[channel] to ensure that x, y come first! @@ -19486,20 +19532,33 @@ } function getBinStepSignal(model, channel) { const fieldDef = model.fieldDef(channel); - if (fieldDef && fieldDef.bin && isBinning(fieldDef.bin)) { - const binSignal = getBinSignalName(model, fieldDef.field, fieldDef.bin); // TODO: extract this to be range step signal - - const sizeType = getSizeChannel$1(channel); + if (fieldDef !== null && fieldDef !== void 0 && fieldDef.bin) { + const { + bin, + field + } = fieldDef; + const sizeType = getSizeChannel(channel); const sizeSignal = model.getName(sizeType); - return new SignalRefWrapper(() => { - const updatedName = model.getSignalName(binSignal); - const binCount = "(".concat(updatedName, ".stop - ").concat(updatedName, ".start) / ").concat(updatedName, ".step"); - return "".concat(model.getSignalName(sizeSignal), " / (").concat(binCount, ")"); - }); + + if (vegaUtil.isObject(bin) && bin.binned && bin.step !== undefined) { + return new SignalRefWrapper(() => { + const scaleName = model.scaleName(channel); + const binCount = `(domain("${scaleName}")[1] - domain("${scaleName}")[0]) / ${bin.step}`; + return `${model.getSignalName(sizeSignal)} / (${binCount})`; + }); + } else if (isBinning(bin)) { + const binSignal = getBinSignalName(model, field, bin); // TODO: extract this to be range step signal + + return new SignalRefWrapper(() => { + const updatedName = model.getSignalName(binSignal); + const binCount = `(${updatedName}.stop - ${updatedName}.start) / ${updatedName}.step`; + return `${model.getSignalName(sizeSignal)} / (${binCount})`; + }); + } } return undefined; } /** @@ -19530,11 +19589,11 @@ switch (property) { case 'range': { const range = specifiedScale.range; - if (isArray(range)) { + if (vegaUtil.isArray(range)) { if (isXorY(channel)) { return makeExplicit(range.map(v => { if (v === 'width' || v === 'height') { // get signal for width/height // Just like default range logic below, we use SignalRefWrapper to account for potential merges and renames. 
@@ -19544,11 +19603,11 @@ } return v; })); } - } else if (isObject(range)) { + } else if (vegaUtil.isObject(range)) { return makeExplicit({ data: model.requestDataName(DataSourceType.Main), field: range.field, sort: { op: 'min', @@ -19587,12 +19646,12 @@ rangeMax } = specifiedScale; const d = defaultRange(channel, model); if ((rangeMin !== undefined || rangeMax !== undefined) && // it's ok to check just rangeMin's compatibility since rangeMin/rangeMax are the same - scaleTypeSupportProperty(scaleType, 'rangeMin') && isArray(d) && d.length === 2) { - return makeExplicit([rangeMin !== null && rangeMin !== void 0 ? rangeMin : d[0], rangeMax !== null && rangeMax !== void 0 ? rangeMax : d[1]]); + scaleTypeSupportProperty(scaleType, 'rangeMin') && vegaUtil.isArray(d) && d.length === 2) { + return makeExplicit([rangeMin ?? d[0], rangeMax ?? d[1]]); } return makeImplicit(d); } @@ -19648,11 +19707,11 @@ } } // If step is null, use zero to width or height. // Note that we use SignalRefWrapper to account for potential merges and renames. - const sizeType = getSizeChannel$1(channel); + const sizeType = getSizeChannel(channel); const sizeSignal = model.getName(sizeType); if (channel === Y && hasContinuousDomain(scaleType)) { // For y continuous scale, we have to start from the height as the bottom part has the max value. return [SignalRefWrapper.fromName(getSignalName, sizeSignal), 0]; @@ -19687,11 +19746,11 @@ { // max radius = half od min(width,height) return [0, new SignalRefWrapper(() => { const w = model.getSignalName('width'); const h = model.getSignalName('height'); - return "min(".concat(w, ",").concat(h, ")/2"); + return `min(${w},${h})/2`; })]; } case STROKEWIDTH: // TODO: support custom rangeMin, rangeMax @@ -19725,11 +19784,11 @@ return [config.scale.minOpacity, config.scale.maxOpacity]; } /* istanbul ignore next: should never reach here */ - throw new Error("Scale range undefined for channel ".concat(channel)); + throw new Error(`Scale range undefined for channel ${channel}`); } function defaultContinuousToDiscreteCount(scaleType, config, domain, channel) { switch (scaleType) { case 'quantile': @@ -19737,11 +19796,11 @@ case 'quantize': return config.scale.quantizeCount; case 'threshold': - if (domain !== undefined && isArray(domain)) { + if (domain !== undefined && vegaUtil.isArray(domain)) { return domain.length + 1; } else { warn(domainRequiredForThresholdScale(channel)); // default threshold boundaries for threshold scale since domain has cardinality of 2 return 3; @@ -19760,12 +19819,12 @@ function interpolateRange(rangeMin, rangeMax, cardinality) { // always return a signal since it's better to compute the sequence in Vega later const f = () => { const rMax = signalOrStringValue(rangeMax); const rMin = signalOrStringValue(rangeMin); - const step = "(".concat(rMax, " - ").concat(rMin, ") / (").concat(cardinality, " - 1)"); - return "sequence(".concat(rMin, ", ").concat(rMax, " + ").concat(step, ", ").concat(step, ")"); + const step = `(${rMax} - ${rMin}) / (${cardinality} - 1)`; + return `sequence(${rMin}, ${rMax} + ${step}, ${step})`; }; if (isSignalRef(rangeMax)) { return new SignalRefWrapper(f); } else { @@ -19777,11 +19836,11 @@ function sizeRangeMin(mark, zero, config) { if (zero) { if (isSignalRef(zero)) { return { - signal: "".concat(zero.signal, " ? 0 : ").concat(sizeRangeMin(mark, false, config)) + signal: `${zero.signal} ? 
0 : ${sizeRangeMin(mark, false, config)}` }; } else { return 0; } } @@ -19827,14 +19886,14 @@ return config.scale.maxBandSize; } const min = minXYStep(size, xyStepSignals, config.view); - if (isNumber(min)) { + if (vegaUtil.isNumber(min)) { return min - 1; } else { - return new SignalRefWrapper(() => "".concat(min.signal, " - 1")); + return new SignalRefWrapper(() => `${min.signal} - 1`); } } case 'line': case 'trail': @@ -19852,14 +19911,14 @@ return config.scale.maxSize; } const pointStep = minXYStep(size, xyStepSignals, config.view); - if (isNumber(pointStep)) { + if (vegaUtil.isNumber(pointStep)) { return Math.pow(MAX_SIZE_RANGE_STEP_RATIO * pointStep, 2); } else { - return new SignalRefWrapper(() => "pow(".concat(MAX_SIZE_RANGE_STEP_RATIO, " * ").concat(pointStep.signal, ", 2)")); + return new SignalRefWrapper(() => `pow(${MAX_SIZE_RANGE_STEP_RATIO} * ${pointStep.signal}, 2)`); } } } /* istanbul ignore next: should never reach here */ // sizeRangeMax not implemented for the mark @@ -19877,11 +19936,11 @@ const heightStep = isStep(size.height) ? size.height.step : getViewConfigDiscreteStep(viewConfig, 'height'); if (xyStepSignals.x || xyStepSignals.y) { return new SignalRefWrapper(() => { const exprs = [xyStepSignals.x ? xyStepSignals.x.signal : widthStep, xyStepSignals.y ? xyStepSignals.y.signal : heightStep]; - return "min(".concat(exprs.join(', '), ")"); + return `min(${exprs.join(', ')})`; }); } return Math.min(widthStep, heightStep); } @@ -19981,12 +20040,13 @@ fieldOrDatumDef }) => interpolate(channel, fieldOrDatumDef.type), nice: ({ scaleType, channel, + domain, fieldOrDatumDef - }) => nice(scaleType, channel, fieldOrDatumDef), + }) => nice(scaleType, channel, domain, fieldOrDatumDef), padding: ({ channel, scaleType, fieldOrDatumDef, markDef, @@ -20019,11 +20079,11 @@ channel, fieldOrDatumDef, domain, markDef, scaleType - }) => zero$1(channel, fieldOrDatumDef, domain, markDef, scaleType) + }) => zero(channel, fieldOrDatumDef, domain, markDef, scaleType) }; // This method is here rather than in range.ts to avoid circular dependency. function parseScaleRange(model) { if (isUnitModel(model)) { parseUnitScaleRange(model); @@ -20092,14 +20152,14 @@ return 'hcl'; } return undefined; } - function nice(scaleType, channel, fieldOrDatumDef) { + function nice(scaleType, channel, specifiedDomain, fieldOrDatumDef) { var _getFieldDef; - if (((_getFieldDef = getFieldDef(fieldOrDatumDef)) === null || _getFieldDef === void 0 ? void 0 : _getFieldDef.bin) || contains([ScaleType.TIME, ScaleType.UTC], scaleType)) { + if ((_getFieldDef = getFieldDef(fieldOrDatumDef)) !== null && _getFieldDef !== void 0 && _getFieldDef.bin || vegaUtil.isArray(specifiedDomain) || contains([ScaleType.TIME, ScaleType.UTC], scaleType)) { return undefined; } return channel in POSITION_SCALE_CHANNEL_INDEX ? true : undefined; } @@ -20166,11 +20226,11 @@ /* By default, paddingOuter is paddingInner / 2. The reason is that size (width/height) = step * (cardinality - paddingInner + 2 * paddingOuter). and we want the width/height to be integer by default. Note that step (by default) and cardinality are integers.) */ isSignalRef(paddingInnerValue) ? 
{ - signal: "".concat(paddingInnerValue.signal, "/2") + signal: `${paddingInnerValue.signal}/2` } : paddingInnerValue / 2); } } return undefined; @@ -20178,11 +20238,11 @@ function reverse(scaleType, sort, channel, scaleConfig) { if (channel === 'x' && scaleConfig.xReverse !== undefined) { if (hasContinuousDomain(scaleType) && sort === 'descending') { if (isSignalRef(scaleConfig.xReverse)) { return { - signal: "!".concat(scaleConfig.xReverse.signal) + signal: `!${scaleConfig.xReverse.signal}` }; } else { return !scaleConfig.xReverse; } } @@ -20196,17 +20256,17 @@ return true; } return undefined; } - function zero$1(channel, fieldDef, specifiedDomain, markDef, scaleType) { - // If users explicitly provide a domain range, we should not augment zero as that will be unexpected. + function zero(channel, fieldDef, specifiedDomain, markDef, scaleType) { + // If users explicitly provide a domain, we should not augment zero as that will be unexpected. const hasCustomDomain = !!specifiedDomain && specifiedDomain !== 'unaggregated'; if (hasCustomDomain) { if (hasContinuousDomain(scaleType)) { - if (isArray(specifiedDomain)) { + if (vegaUtil.isArray(specifiedDomain)) { const first = specifiedDomain[0]; const last = specifiedDomain[specifiedDomain.length - 1]; if (first <= 0 && last >= 0) { // if the domain includes zero, make zero remains true @@ -20251,11 +20311,11 @@ * Determine if there is a specified scale type and if it is appropriate, * or determine default type if type is unspecified or inappropriate. */ // NOTE: CompassQL uses this method. function scaleType(specifiedScale, channel, fieldDef, mark) { - const defaultScaleType = defaultType$2(channel, fieldDef, mark); + const defaultScaleType = defaultType(channel, fieldDef, mark); const { type } = specifiedScale; if (!isScaleChannel(channel)) { @@ -20284,40 +20344,48 @@ /** * Determine appropriate default scale type. */ // NOTE: Voyager uses this method. - function defaultType$2(channel, fieldDef, mark) { - var _fieldDef$axis; - + function defaultType(channel, fieldDef, mark) { switch (fieldDef.type) { case 'nominal': case 'ordinal': - if (isColorChannel(channel) || rangeType(channel) === 'discrete') { - if (channel === 'shape' && fieldDef.type === 'ordinal') { - warn(discreteChannelCannotEncode(channel, 'ordinal')); + { + var _fieldDef$axis; + + if (isColorChannel(channel) || rangeType(channel) === 'discrete') { + if (channel === 'shape' && fieldDef.type === 'ordinal') { + warn(discreteChannelCannotEncode(channel, 'ordinal')); + } + + return 'ordinal'; } - return 'ordinal'; - } + if (channel in POSITION_SCALE_CHANNEL_INDEX) { + if (contains(['rect', 'bar', 'image', 'rule'], mark.type)) { + // The rect/bar mark should fit into a band. + // For rule, using band scale to make rule align with axis ticks better https://github.com/vega/vega-lite/issues/3429 + return 'band'; + } + } else if (mark.type === 'arc' && channel in POLAR_POSITION_SCALE_CHANNEL_INDEX) { + return 'band'; + } - if (channel in POSITION_SCALE_CHANNEL_INDEX) { - if (contains(['rect', 'bar', 'image', 'rule'], mark)) { - // The rect/bar mark should fit into a band. 
- // For rule, using band scale to make rule align with axis ticks better https://github.com/vega/vega-lite/issues/3429 + const dimensionSize = mark[getSizeChannel$1(channel)]; + + if (isRelativeBandSize(dimensionSize)) { return 'band'; } - } else if (mark === 'arc' && channel in POLAR_POSITION_SCALE_CHANNEL_INDEX) { - return 'band'; - } - if (fieldDef.band !== undefined || isPositionFieldOrDatumDef(fieldDef) && ((_fieldDef$axis = fieldDef.axis) === null || _fieldDef$axis === void 0 ? void 0 : _fieldDef$axis.tickBand)) { - return 'band'; - } // Otherwise, use ordinal point scale so we can easily get center positions of the marks. + if (isPositionFieldOrDatumDef(fieldDef) && (_fieldDef$axis = fieldDef.axis) !== null && _fieldDef$axis !== void 0 && _fieldDef$axis.tickBand) { + return 'band'; + } // Otherwise, use ordinal point scale so we can easily get center positions of the marks. - return 'point'; + return 'point'; + } case 'temporal': if (isColorChannel(channel)) { return 'time'; } else if (rangeType(channel) === 'discrete') { @@ -20381,11 +20449,12 @@ */ function parseUnitScaleCore(model) { const { encoding, - mark + mark, + markDef } = model; return SCALE_CHANNELS.reduce((scaleComponents, channel) => { const fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]); // must be typed def to have scale // Don't generate scale for shape of geoshape @@ -20394,15 +20463,13 @@ } let specifiedScale = fieldOrDatumDef && fieldOrDatumDef['scale']; if (fieldOrDatumDef && specifiedScale !== null && specifiedScale !== false) { - var _specifiedScale; - - specifiedScale = (_specifiedScale = specifiedScale) !== null && _specifiedScale !== void 0 ? _specifiedScale : {}; - const sType = scaleType(specifiedScale, channel, fieldOrDatumDef, mark); - scaleComponents[channel] = new ScaleComponent(model.scaleName(channel + '', true), { + specifiedScale ?? (specifiedScale = {}); + const sType = scaleType(specifiedScale, channel, fieldOrDatumDef, markDef); + scaleComponents[channel] = new ScaleComponent(model.scaleName(`${channel}`, true), { value: sType, explicit: specifiedScale.type === sType }); } @@ -20419,14 +20486,14 @@ for (const child of model.children) { parseScaleCore(child); // Instead of always merging right away -- check if it is compatible to merge first! for (const channel of keys(child.component.scales)) { - var _resolve$scale$channe; + var _resolve$scale; // if resolve is undefined, set default first - resolve.scale[channel] = (_resolve$scale$channe = resolve.scale[channel]) !== null && _resolve$scale$channe !== void 0 ? _resolve$scale$channe : defaultScaleResolve(channel, model); + (_resolve$scale = resolve.scale)[channel] ?? (_resolve$scale[channel] = defaultScaleResolve(channel, model)); if (resolve.scale[channel] === 'shared') { const explicitScaleType = scaleTypeWithExplicitIndex[channel]; const childScaleType = child.component.scales[channel].getWithExplicit('type'); @@ -20520,12 +20587,10 @@ /** Name map for projections, which can be renamed by a model's parent. */ /** Name map for signals, which can be renamed by a model's parent. 
*/ constructor(spec, type, parent, parentGivenName, config, resolve, view) { - var _spec$name, _spec$transform; - this.type = type; this.parent = parent; this.config = config; _defineProperty(this, "name", void 0); @@ -20550,42 +20615,44 @@ _defineProperty(this, "component", void 0); _defineProperty(this, "view", void 0); - _defineProperty(this, "children", []); + _defineProperty(this, "children", void 0); _defineProperty(this, "correctDataNames", mark => { + var _mark$from, _mark$from2, _mark$from2$facet; + // TODO: make this correct // for normal data references - if (mark.from && mark.from.data) { + if ((_mark$from = mark.from) !== null && _mark$from !== void 0 && _mark$from.data) { mark.from.data = this.lookupDataSource(mark.from.data); } // for access to facet data - if (mark.from && mark.from.facet && mark.from.facet.data) { + if ((_mark$from2 = mark.from) !== null && _mark$from2 !== void 0 && (_mark$from2$facet = _mark$from2.facet) !== null && _mark$from2$facet !== void 0 && _mark$from2$facet.data) { mark.from.facet.data = this.lookupDataSource(mark.from.facet.data); } return mark; }); this.parent = parent; this.config = config; - this.view = replaceExprRefInIndex(view); // If name is not provided, always use parent's givenName to avoid name conflicts. + this.view = replaceExprRef(view); // If name is not provided, always use parent's givenName to avoid name conflicts. - this.name = (_spec$name = spec.name) !== null && _spec$name !== void 0 ? _spec$name : parentGivenName; + this.name = spec.name ?? parentGivenName; this.title = isText(spec.title) ? { text: spec.title - } : spec.title ? this.initTitle(spec.title) : undefined; // Shared name maps + } : spec.title ? replaceExprRef(spec.title) : undefined; // Shared name maps this.scaleNameMap = parent ? parent.scaleNameMap : new NameMap(); this.projectionNameMap = parent ? parent.projectionNameMap : new NameMap(); this.signalNameMap = parent ? parent.signalNameMap : new NameMap(); this.data = spec.data; this.description = spec.description; - this.transforms = normalizeTransform((_spec$transform = spec.transform) !== null && _spec$transform !== void 0 ? _spec$transform : []); + this.transforms = normalizeTransform(spec.transform ?? []); this.layout = type === 'layer' || type === 'unit' ? {} : extractCompositionLayout(spec, type, config); this.component = { data: { sources: parent ? parent.component.data.sources : [], outputNodes: parent ? parent.component.data.outputNodes : {}, @@ -20612,23 +20679,10 @@ axes: {}, legends: {} }; } - initTitle(title) { - const props = keys(title); - const titleInternal = { - text: signalRefOrValue(title.text) - }; - - for (const prop of props) { - titleInternal[prop] = signalRefOrValue(title[prop]); - } - - return titleInternal; - } - get width() { return this.getSizeSignalRef('width'); } get height() { @@ -20676,20 +20730,10 @@ parseLegends() { parseLegend(this); } - assembleGroupStyle() { - if (this.type === 'unit' || this.type === 'layer') { - var _this$view$style, _this$view; - - return (_this$view$style = (_this$view = this.view) === null || _this$view === void 0 ? void 0 : _this$view.style) !== null && _this$view$style !== void 0 ? _this$view$style : 'cell'; - } - - return undefined; - } - assembleEncodeFromView(view) { // Exclude "style" const { style: _, ...baseView @@ -20721,16 +20765,14 @@ } // For top-level spec, we can set the global width and height signal to adjust the group size. // For other child specs, we have to manually set width and height in the encode entry. 
if (this.type === 'unit' || this.type === 'layer') { - var _encodeEntry; - return { width: this.getSizeSignalRef('width'), height: this.getSizeSignalRef('height'), - ...((_encodeEntry = encodeEntry) !== null && _encodeEntry !== void 0 ? _encodeEntry : {}) + ...(encodeEntry ?? {}) }; } } return isEmpty(encodeEntry) ? undefined : encodeEntry; @@ -20794,17 +20836,15 @@ assembleProjections() { return assembleProjections(this); } assembleTitle() { - var _this$title; - const { encoding, ...titleNoEncoding - } = (_this$title = this.title) !== null && _this$title !== void 0 ? _this$title : {}; - const title = { ...extractTitleConfig(this.config.title).nonMark, + } = this.title ?? {}; + const title = { ...extractTitleConfig(this.config.title).nonMarkTitleProperties, ...titleNoEncoding, ...(encoding ? { encode: { update: encoding } @@ -20813,21 +20853,17 @@ if (title.text) { if (contains(['unit', 'layer'], this.type)) { // Unit/Layer if (contains(['middle', undefined], title.anchor)) { - var _title$frame; - - title.frame = (_title$frame = title.frame) !== null && _title$frame !== void 0 ? _title$frame : 'group'; + title.frame ?? (title.frame = 'group'); } } else { - var _title$anchor; - // composition with Vega layout // Set title = "start" by default for composition as "middle" does not look nice // https://github.com/vega/vega/issues/960#issuecomment-471360328 - title.anchor = (_title$anchor = title.anchor) !== null && _title$anchor !== void 0 ? _title$anchor : 'start'; + title.anchor ?? (title.anchor = 'start'); } return isEmpty(title) ? undefined : title; } @@ -20875,11 +20911,11 @@ return group; } getName(text) { - return varName((this.name ? this.name + '_' : '') + text); + return varName((this.name ? `${this.name}_` : '') + text); } getDataName(type) { return this.getName(DataSourceType[type].toLowerCase()); } @@ -21109,32 +21145,28 @@ clone() { return new DensityTransformNode(null, duplicate(this.transform)); } constructor(parent, transform) { - var _this$transform$as, _specifiedAs$, _specifiedAs$2; - super(parent); this.transform = transform; this.transform = duplicate(transform); // duplicate to prevent side effects - const specifiedAs = (_this$transform$as = this.transform.as) !== null && _this$transform$as !== void 0 ? _this$transform$as : [undefined, undefined]; - this.transform.as = [(_specifiedAs$ = specifiedAs[0]) !== null && _specifiedAs$ !== void 0 ? _specifiedAs$ : 'value', (_specifiedAs$2 = specifiedAs[1]) !== null && _specifiedAs$2 !== void 0 ? _specifiedAs$2 : 'density']; + const specifiedAs = this.transform.as ?? [undefined, undefined]; + this.transform.as = [specifiedAs[0] ?? 'value', specifiedAs[1] ?? 'density']; } dependentFields() { - var _this$transform$group; - - return new Set([this.transform.density, ...((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : [])]); + return new Set([this.transform.density, ...(this.transform.groupby ?? [])]); } producedFields() { return new Set(this.transform.as); } hash() { - return "DensityTransform ".concat(hash(this.transform)); + return `DensityTransform ${hash(this.transform)}`; } assemble() { const { density, @@ -21203,11 +21235,11 @@ producedFields() { return new Set(); // filter does not produce any new fields } hash() { - return "FilterInvalid ".concat(hash(this.filter)); + return `FilterInvalid ${hash(this.filter)}`; } /** * Create the VgTransforms for each of the filtered fields. 
*/ @@ -21219,14 +21251,14 @@ expr: 'datum' }); if (fieldDef !== null) { if (fieldDef.type === 'temporal') { - vegaFilters.push("(isDate(".concat(ref, ") || (isValid(").concat(ref, ") && isFinite(+").concat(ref, ")))")); + vegaFilters.push(`(isDate(${ref}) || (isValid(${ref}) && isFinite(+${ref})))`); } else if (fieldDef.type === 'quantitative') { - vegaFilters.push("isValid(".concat(ref, ")")); - vegaFilters.push("isFinite(+".concat(ref, ")")); + vegaFilters.push(`isValid(${ref})`); + vegaFilters.push(`isFinite(+${ref})`); } else ; } return vegaFilters; }, []); @@ -21254,15 +21286,11 @@ const { flatten, as = [] } = this.transform; - this.transform.as = flatten.map((f, i) => { - var _as$i; - - return (_as$i = as[i]) !== null && _as$i !== void 0 ? _as$i : f; - }); + this.transform.as = flatten.map((f, i) => as[i] ?? f); } dependentFields() { return new Set(this.transform.flatten); } @@ -21270,11 +21298,11 @@ producedFields() { return new Set(this.transform.as); } hash() { - return "FlattenTransform ".concat(hash(this.transform)); + return `FlattenTransform ${hash(this.transform)}`; } assemble() { const { flatten: fields, @@ -21298,18 +21326,16 @@ clone() { return new FoldTransformNode(null, duplicate(this.transform)); } constructor(parent, transform) { - var _this$transform$as, _specifiedAs$, _specifiedAs$2; - super(parent); this.transform = transform; this.transform = duplicate(transform); // duplicate to prevent side effects - const specifiedAs = (_this$transform$as = this.transform.as) !== null && _this$transform$as !== void 0 ? _this$transform$as : [undefined, undefined]; - this.transform.as = [(_specifiedAs$ = specifiedAs[0]) !== null && _specifiedAs$ !== void 0 ? _specifiedAs$ : 'key', (_specifiedAs$2 = specifiedAs[1]) !== null && _specifiedAs$2 !== void 0 ? _specifiedAs$2 : 'value']; + const specifiedAs = this.transform.as ?? [undefined, undefined]; + this.transform.as = [specifiedAs[0] ?? 'key', specifiedAs[1] ?? 'value']; } dependentFields() { return new Set(this.transform.fold); } @@ -21317,11 +21343,11 @@ producedFields() { return new Set(this.transform.as); } hash() { - return "FoldTransform ".concat(hash(this.transform)); + return `FoldTransform ${hash(this.transform)}`; } assemble() { const { fold, @@ -21351,26 +21377,26 @@ for (const coordinates of [[LONGITUDE, LATITUDE], [LONGITUDE2, LATITUDE2]]) { const pair = coordinates.map(channel => { const def = getFieldOrDatumDef(model.encoding[channel]); return isFieldDef(def) ? def.field : isDatumDef(def) ? { - expr: "".concat(def.datum) + expr: `${def.datum}` } : isValueDef(def) ? { - expr: "".concat(def['value']) + expr: `${def['value']}` } : undefined; }); if (pair[0] || pair[1]) { - parent = new GeoJSONNode(parent, pair, null, model.getName("geojson_".concat(geoJsonCounter++))); + parent = new GeoJSONNode(parent, pair, null, model.getName(`geojson_${geoJsonCounter++}`)); } } if (model.channelHasField(SHAPE)) { const fieldDef = model.typedFieldDef(SHAPE); if (fieldDef.type === GEOJSON) { - parent = new GeoJSONNode(parent, null, fieldDef.field, model.getName("geojson_".concat(geoJsonCounter++))); + parent = new GeoJSONNode(parent, null, fieldDef.field, model.getName(`geojson_${geoJsonCounter++}`)); } } return parent; } @@ -21381,35 +21407,36 @@ this.geojson = geojson; this.signal = signal; } dependentFields() { - var _this$fields; - - const fields = ((_this$fields = this.fields) !== null && _this$fields !== void 0 ? _this$fields : []).filter(isString); + const fields = (this.fields ?? 
[]).filter(vegaUtil.isString); return new Set([...(this.geojson ? [this.geojson] : []), ...fields]); } producedFields() { return new Set(); } hash() { - return "GeoJSON ".concat(this.geojson, " ").concat(this.signal, " ").concat(hash(this.fields)); + return `GeoJSON ${this.geojson} ${this.signal} ${hash(this.fields)}`; } assemble() { - return { + return [...(this.geojson ? [{ + type: 'filter', + expr: `isValid(datum["${this.geojson}"])` + }] : []), { type: 'geojson', ...(this.fields ? { fields: this.fields } : {}), ...(this.geojson ? { geojson: this.geojson } : {}), signal: this.signal - }; + }]; } } class GeoPointNode extends DataFlowNode { @@ -21431,35 +21458,35 @@ for (const coordinates of [[LONGITUDE, LATITUDE], [LONGITUDE2, LATITUDE2]]) { const pair = coordinates.map(channel => { const def = getFieldOrDatumDef(model.encoding[channel]); return isFieldDef(def) ? def.field : isDatumDef(def) ? { - expr: "".concat(def.datum) + expr: `${def.datum}` } : isValueDef(def) ? { - expr: "".concat(def['value']) + expr: `${def['value']}` } : undefined; }); const suffix = coordinates[0] === LONGITUDE2 ? '2' : ''; if (pair[0] || pair[1]) { - parent = new GeoPointNode(parent, model.projectionName(), pair, [model.getName('x' + suffix), model.getName('y' + suffix)]); + parent = new GeoPointNode(parent, model.projectionName(), pair, [model.getName(`x${suffix}`), model.getName(`y${suffix}`)]); } } return parent; } dependentFields() { - return new Set(this.fields.filter(isString)); + return new Set(this.fields.filter(vegaUtil.isString)); } producedFields() { return new Set(this.as); } hash() { - return "Geopoint ".concat(this.projection, " ").concat(hash(this.fields), " ").concat(hash(this.as)); + return `Geopoint ${this.projection} ${hash(this.fields)} ${hash(this.as)}`; } assemble() { return { type: 'geopoint', @@ -21480,13 +21507,11 @@ super(parent); this.transform = transform; } dependentFields() { - var _this$transform$group; - - return new Set([this.transform.impute, this.transform.key, ...((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : [])]); + return new Set([this.transform.impute, this.transform.key, ...(this.transform.groupby ?? [])]); } producedFields() { return new Set([this.transform.impute]); } @@ -21497,11 +21522,11 @@ stop, step } = keyvals; const result = [start, stop, ...(step ? [step] : [])].join(','); return { - signal: "sequence(".concat(result, ")") + signal: `sequence(${result})` }; } static makeFromTransform(parent, imputeTransform) { return new ImputeNode(parent, imputeTransform); @@ -21550,11 +21575,11 @@ return null; } hash() { - return "Impute ".concat(hash(this.transform)); + return `Impute ${hash(this.transform)}`; } assemble() { const { impute, @@ -21580,22 +21605,22 @@ }; if (method && method !== 'value') { const deriveNewField = { type: 'window', - as: ["imputed_".concat(impute, "_value")], + as: [`imputed_${impute}_value`], ops: [method], fields: [impute], frame, ignorePeers: false, ...(groupby ? { groupby } : {}) }; const replaceOriginal = { type: 'formula', - expr: "datum.".concat(impute, " === null ? datum.imputed_").concat(impute, "_value : datum.").concat(impute), + expr: `datum.${impute} === null ? 
datum.imputed_${impute}_value : datum.${impute}`, as: impute }; return [imputeTransform, deriveNewField, replaceOriginal]; } else { return [imputeTransform]; @@ -21612,32 +21637,28 @@ clone() { return new LoessTransformNode(null, duplicate(this.transform)); } constructor(parent, transform) { - var _this$transform$as, _specifiedAs$, _specifiedAs$2; - super(parent); this.transform = transform; this.transform = duplicate(transform); // duplicate to prevent side effects - const specifiedAs = (_this$transform$as = this.transform.as) !== null && _this$transform$as !== void 0 ? _this$transform$as : [undefined, undefined]; - this.transform.as = [(_specifiedAs$ = specifiedAs[0]) !== null && _specifiedAs$ !== void 0 ? _specifiedAs$ : transform.on, (_specifiedAs$2 = specifiedAs[1]) !== null && _specifiedAs$2 !== void 0 ? _specifiedAs$2 : transform.loess]; + const specifiedAs = this.transform.as ?? [undefined, undefined]; + this.transform.as = [specifiedAs[0] ?? transform.on, specifiedAs[1] ?? transform.loess]; } dependentFields() { - var _this$transform$group; - - return new Set([this.transform.loess, this.transform.on, ...((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : [])]); + return new Set([this.transform.loess, this.transform.on, ...(this.transform.groupby ?? [])]); } producedFields() { return new Set(this.transform.as); } hash() { - return "LoessTransform ".concat(hash(this.transform)); + return `LoessTransform ${hash(this.transform)}`; } assemble() { const { loess, @@ -21679,21 +21700,29 @@ if (!fromSource) { fromSource = new SourceNode(from.data); sources.push(fromSource); } - const fromOutputName = model.getName("lookup_".concat(counter)); + const fromOutputName = model.getName(`lookup_${counter}`); fromOutputNode = new OutputNode(fromSource, fromOutputName, DataSourceType.Lookup, model.component.data.outputNodeRefCounts); model.component.data.outputNodes[fromOutputName] = fromOutputNode; } else if (isLookupSelection(from)) { - const selName = from.selection; + const selName = from.param; transform = { as: selName, ...transform }; - fromOutputNode = model.getSelectionComponent(varName(selName), selName).materialized; + let selCmpt; + try { + selCmpt = model.getSelectionComponent(varName(selName), selName); + } catch (e) { + throw new Error(cannotLookupVariableParameter(selName)); + } + + fromOutputNode = selCmpt.materialized; + if (!fromOutputNode) { throw new Error(noSameUnitLookup(selName)); } } @@ -21703,36 +21732,36 @@ dependentFields() { return new Set([this.transform.lookup]); } producedFields() { - return new Set(this.transform.as ? array(this.transform.as) : this.transform.from.fields); + return new Set(this.transform.as ? vegaUtil.array(this.transform.as) : this.transform.from.fields); } hash() { - return "Lookup ".concat(hash({ - transform: this.transform, - secondary: this.secondary - })); + return `Lookup ${hash({ + transform: this.transform, + secondary: this.secondary + })}`; } assemble() { let foreign; if (this.transform.from.fields) { // lookup a few fields and add create a flat output foreign = { values: this.transform.from.fields, ...(this.transform.as ? 
{ - as: array(this.transform.as) + as: vegaUtil.array(this.transform.as) } : {}) }; } else { // lookup full record and nest it let asName = this.transform.as; - if (!isString(asName)) { + if (!vegaUtil.isString(asName)) { warn(NO_FIELDS_NEEDS_AS); asName = '_lookup'; } foreign = { @@ -21762,32 +21791,28 @@ clone() { return new QuantileTransformNode(null, duplicate(this.transform)); } constructor(parent, transform) { - var _this$transform$as, _specifiedAs$, _specifiedAs$2; - super(parent); this.transform = transform; this.transform = duplicate(transform); // duplicate to prevent side effects - const specifiedAs = (_this$transform$as = this.transform.as) !== null && _this$transform$as !== void 0 ? _this$transform$as : [undefined, undefined]; - this.transform.as = [(_specifiedAs$ = specifiedAs[0]) !== null && _specifiedAs$ !== void 0 ? _specifiedAs$ : 'prob', (_specifiedAs$2 = specifiedAs[1]) !== null && _specifiedAs$2 !== void 0 ? _specifiedAs$2 : 'value']; + const specifiedAs = this.transform.as ?? [undefined, undefined]; + this.transform.as = [specifiedAs[0] ?? 'prob', specifiedAs[1] ?? 'value']; } dependentFields() { - var _this$transform$group; - - return new Set([this.transform.quantile, ...((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : [])]); + return new Set([this.transform.quantile, ...(this.transform.groupby ?? [])]); } producedFields() { return new Set(this.transform.as); } hash() { - return "QuantileTransform ".concat(hash(this.transform)); + return `QuantileTransform ${hash(this.transform)}`; } assemble() { const { quantile, @@ -21811,32 +21836,28 @@ clone() { return new RegressionTransformNode(null, duplicate(this.transform)); } constructor(parent, transform) { - var _this$transform$as, _specifiedAs$, _specifiedAs$2; - super(parent); this.transform = transform; this.transform = duplicate(transform); // duplicate to prevent side effects - const specifiedAs = (_this$transform$as = this.transform.as) !== null && _this$transform$as !== void 0 ? _this$transform$as : [undefined, undefined]; - this.transform.as = [(_specifiedAs$ = specifiedAs[0]) !== null && _specifiedAs$ !== void 0 ? _specifiedAs$ : transform.on, (_specifiedAs$2 = specifiedAs[1]) !== null && _specifiedAs$2 !== void 0 ? _specifiedAs$2 : transform.regression]; + const specifiedAs = this.transform.as ?? [undefined, undefined]; + this.transform.as = [specifiedAs[0] ?? transform.on, specifiedAs[1] ?? transform.regression]; } dependentFields() { - var _this$transform$group; - - return new Set([this.transform.regression, this.transform.on, ...((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : [])]); + return new Set([this.transform.regression, this.transform.on, ...(this.transform.groupby ?? [])]); } producedFields() { return new Set(this.transform.as); } hash() { - return "RegressionTransform ".concat(hash(this.transform)); + return `RegressionTransform ${hash(this.transform)}`; } assemble() { const { regression, @@ -21867,27 +21888,23 @@ super(parent); this.transform = transform; } addDimensions(fields) { - var _this$transform$group; - - this.transform.groupby = unique(((_this$transform$group = this.transform.groupby) !== null && _this$transform$group !== void 0 ? _this$transform$group : []).concat(fields), d => d); + this.transform.groupby = unique((this.transform.groupby ?? 
[]).concat(fields), d => d); } producedFields() { return undefined; // return undefined so that potentially everything can depend on the pivot } dependentFields() { - var _this$transform$group2; - - return new Set([this.transform.pivot, this.transform.value, ...((_this$transform$group2 = this.transform.groupby) !== null && _this$transform$group2 !== void 0 ? _this$transform$group2 : [])]); + return new Set([this.transform.pivot, this.transform.value, ...(this.transform.groupby ?? [])]); } hash() { - return "PivotTransform ".concat(hash(this.transform)); + return `PivotTransform ${hash(this.transform)}`; } assemble() { const { pivot, @@ -21935,11 +21952,11 @@ producedFields() { return new Set(); } hash() { - return "SampleTransform ".concat(hash(this.transform)); + return `SampleTransform ${hash(this.transform)}`; } assemble() { return { type: 'sample', @@ -21971,14 +21988,12 @@ } } if (node instanceof ParseNode) { if (node.parent instanceof SourceNode && !dataSource.source) { - var _dataSource$format; - // If node's parent is a root source and the data source does not refer to another data source, use normal format parse - dataSource.format = { ...((_dataSource$format = dataSource.format) !== null && _dataSource$format !== void 0 ? _dataSource$format : {}), + dataSource.format = { ...(dataSource.format ?? {}), parse: node.assembleFormatParse() }; // add calculates for all nested fields dataSource.transform.push(...node.assembleTransforms(true)); } else { @@ -21987,33 +22002,30 @@ } } if (node instanceof FacetNode) { if (!dataSource.name) { - dataSource.name = "data_".concat(datasetIndex++); + dataSource.name = `data_${datasetIndex++}`; } if (!dataSource.source || dataSource.transform.length > 0) { data.push(dataSource); node.data = dataSource.name; } else { node.data = dataSource.source; } - for (const d of node.assemble()) { - data.push(d); - } // break here because the rest of the tree has to be taken care of by the facet. + data.push(...node.assemble()); // break here because the rest of the tree has to be taken care of by the facet. 
- return; } - if (node instanceof GraticuleNode || node instanceof SequenceNode || node instanceof FilterInvalidNode || node instanceof FilterNode || node instanceof CalculateNode || node instanceof GeoPointNode || node instanceof GeoJSONNode || node instanceof AggregateNode || node instanceof LookupNode || node instanceof WindowTransformNode || node instanceof JoinAggregateTransformNode || node instanceof FoldTransformNode || node instanceof FlattenTransformNode || node instanceof DensityTransformNode || node instanceof LoessTransformNode || node instanceof QuantileTransformNode || node instanceof RegressionTransformNode || node instanceof IdentifierNode || node instanceof SampleTransformNode || node instanceof PivotTransformNode) { + if (node instanceof GraticuleNode || node instanceof SequenceNode || node instanceof FilterInvalidNode || node instanceof FilterNode || node instanceof CalculateNode || node instanceof GeoPointNode || node instanceof AggregateNode || node instanceof LookupNode || node instanceof WindowTransformNode || node instanceof JoinAggregateTransformNode || node instanceof FoldTransformNode || node instanceof FlattenTransformNode || node instanceof DensityTransformNode || node instanceof LoessTransformNode || node instanceof QuantileTransformNode || node instanceof RegressionTransformNode || node instanceof IdentifierNode || node instanceof SampleTransformNode || node instanceof PivotTransformNode) { dataSource.transform.push(node.assemble()); } - if (node instanceof BinNode || node instanceof TimeUnitNode || node instanceof ImputeNode || node instanceof StackNode) { + if (node instanceof BinNode || node instanceof TimeUnitNode || node instanceof ImputeNode || node instanceof StackNode || node instanceof GeoJSONNode) { dataSource.transform.push(...node.assemble()); } if (node instanceof OutputNode) { if (dataSource.source && dataSource.transform.length === 0) { @@ -22022,11 +22034,11 @@ // Note that an output node may be required but we still do not assemble a // separate data source for it. node.setSource(dataSource.name); } else { if (!dataSource.name) { - dataSource.name = "data_".concat(datasetIndex++); + dataSource.name = `data_${datasetIndex++}`; } // Here we set the name of the datasource we generated. From now on // other assemblers can use it. node.setSource(dataSource.name); // if this node has more than one child, we will add a datasource automatically @@ -22058,11 +22070,11 @@ break; default: { if (!dataSource.name) { - dataSource.name = "data_".concat(datasetIndex++); + dataSource.name = `data_${datasetIndex++}`; } let source = dataSource.name; if (!dataSource.source || dataSource.transform.length > 0) { @@ -22122,11 +22134,11 @@ let sourceIndex = 0; for (const root of dataComponent.sources) { // assign a name if the source does not have a name yet if (!root.hasName()) { - root.dataName = "source_".concat(sourceIndex++); + root.dataName = `source_${sourceIndex++}`; } const newData = root.assemble(); walkTree(root, newData); } // remove empty transform arrays for cleaner output @@ -22140,22 +22152,18 @@ let whereTo = 0; for (const [i, d] of data.entries()) { - var _d$transform; - - if (((_d$transform = d.transform) !== null && _d$transform !== void 0 ? _d$transform : []).length === 0 && !d.source) { + if ((d.transform ?? 
[]).length === 0 && !d.source) { data.splice(whereTo++, 0, data.splice(i, 1)[0]); } } // now fix the from references in lookup transforms for (const d of data) { - for (const t of (_d$transform2 = d.transform) !== null && _d$transform2 !== void 0 ? _d$transform2 : []) { - var _d$transform2; - + for (const t of d.transform ?? []) { if (t.type === 'lookup') { t.from = dataComponent.outputNodes[t.from].getSource(); } } } // inline values for datasets that are in the datastore @@ -22205,22 +22213,21 @@ includeDefault: titleConfig === undefined || !!titleConfig }); if (child.component.layoutHeaders[channel].title) { // TODO: better handle multiline titles - title$1 = isArray(title$1) ? title$1.join(', ') : title$1; // merge title with child to produce "Title / Subtitle / Sub-subtitle" + title$1 = vegaUtil.isArray(title$1) ? title$1.join(', ') : title$1; // merge title with child to produce "Title / Subtitle / Sub-subtitle" - title$1 += ' / ' + child.component.layoutHeaders[channel].title; + title$1 += ` / ${child.component.layoutHeaders[channel].title}`; child.component.layoutHeaders[channel].title = null; } - const labelOrient = getHeaderProperty('labelOrient', fieldDef, config, channel); - const header = (_fieldDef$header = fieldDef.header) !== null && _fieldDef$header !== void 0 ? _fieldDef$header : {}; - const labels = getFirstDefined(header.labels, config.header.labels, true); + const labelOrient = getHeaderProperty('labelOrient', fieldDef.header, config, channel); + const labels = fieldDef.header !== null ? getFirstDefined((_fieldDef$header = fieldDef.header) === null || _fieldDef$header === void 0 ? void 0 : _fieldDef$header.labels, config.header.labels, true) : false; const headerType = contains(['bottom', 'right'], labelOrient) ? 'footer' : 'header'; component.layoutHeaders[channel] = { - title: title$1, + title: fieldDef.header !== null ? title$1 : null, facetFieldDef: fieldDef, [headerType]: channel === 'facet' ? [] : [makeHeaderComponent(model, channel, labels)] }; } } @@ -22250,14 +22257,12 @@ // For shared axis, move the axes to facet's header or footer const headerChannel = channel === 'x' ? 'column' : 'row'; const layoutHeader = layoutHeaders[headerChannel]; for (const axisComponent of child.component.axes[channel]) { - var _layoutHeader$headerT; - const headerType = getHeaderType(axisComponent.get('orient')); - layoutHeader[headerType] = (_layoutHeader$headerT = layoutHeader[headerType]) !== null && _layoutHeader$headerT !== void 0 ? _layoutHeader$headerT : [makeHeaderComponent(model, headerChannel, false)]; // FIXME: assemble shouldn't be called here, but we do it this way so we only extract the main part of the axes + layoutHeader[headerType] ?? (layoutHeader[headerType] = [makeHeaderComponent(model, headerChannel, false)]); // FIXME: assemble shouldn't be called here, but we do it this way so we only extract the main part of the axes const mainAxis = assembleAxis(axisComponent, 'main', model.config, { header: true }); @@ -22309,11 +22314,11 @@ const layoutSizeCmpt = model.component.layoutSize; let mergedSize; // Try to merge layout size for (const child of model.children) { const childSize = child.component.layoutSize.getWithExplicit(sizeType); - const scaleResolve = resolve.scale[channel]; + const scaleResolve = resolve.scale[channel] ?? 
defaultScaleResolve(channel, model); if (scaleResolve === 'independent' && childSize.value === 'step') { // Do not merge independent scales with range-step as their size depends // on the scale domains, which can be different between scales. mergedSize = undefined; @@ -22355,11 +22360,11 @@ size, component } = model; for (const channel of POSITION_SCALE_CHANNELS) { - const sizeType = getSizeChannel(channel); + const sizeType = getSizeChannel$1(channel); if (size[sizeType]) { const specifiedSize = size[sizeType]; component.layoutSize.set(sizeType, isStep(specifiedSize) ? 'step' : specifiedSize, true); } else { @@ -22399,12 +22404,12 @@ } } function facetSortFieldName(fieldDef, sort, opt) { return vgField(sort, { - suffix: "by_".concat(vgField(fieldDef)), - ...(opt !== null && opt !== void 0 ? opt : {}) + suffix: `by_${vgField(fieldDef)}`, + ...(opt ?? {}) }); } class FacetModel extends ModelWithField { constructor(spec, parent, parentGivenName, config) { super(spec, 'facet', parent, parentGivenName, config, spec.resolve); @@ -22430,11 +22435,11 @@ const channels = keys(facet); const normalizedFacet = {}; for (const channel of channels) { - if (!contains([ROW, COLUMN], channel)) { + if (![ROW, COLUMN].includes(channel)) { // Drop unsupported channel warn(incompatibleChannel(channel, 'facet')); break; } @@ -22450,20 +22455,18 @@ return normalizedFacet; } initFacetFieldDef(fieldDef, channel) { - const { - header, - ...rest - } = fieldDef; // Cast because we call initFieldDef, which assumes general FieldDef. + // Cast because we call initFieldDef, which assumes general FieldDef. // However, FacetFieldDef is a bit more constrained than the general FieldDef + const facetFieldDef = initFieldDef(fieldDef, channel); - const facetFieldDef = initFieldDef(rest, channel); - - if (header) { - facetFieldDef.header = replaceExprRefInIndex(header); + if (facetFieldDef.header) { + facetFieldDef.header = replaceExprRef(facetFieldDef.header); + } else if (facetFieldDef.header === null) { + facetFieldDef.header = null; } return facetFieldDef; } @@ -22526,36 +22529,30 @@ } = layoutHeaderComponent; if (facetFieldDef) { const titleOrient = getHeaderProperty('titleOrient', facetFieldDef.header, this.config, channel); - if (contains(['right', 'bottom'], titleOrient)) { - var _layoutMixins$titleAn; - + if (['right', 'bottom'].includes(titleOrient)) { const headerChannel = getHeaderChannel(channel, titleOrient); - layoutMixins.titleAnchor = (_layoutMixins$titleAn = layoutMixins.titleAnchor) !== null && _layoutMixins$titleAn !== void 0 ? _layoutMixins$titleAn : {}; + layoutMixins.titleAnchor ?? (layoutMixins.titleAnchor = {}); layoutMixins.titleAnchor[headerChannel] = 'end'; } } - if (headerComponent === null || headerComponent === void 0 ? void 0 : headerComponent[0]) { + if (headerComponent !== null && headerComponent !== void 0 && headerComponent[0]) { // set header/footerBand const sizeType = channel === 'row' ? 'height' : 'width'; const bandType = headerType === 'header' ? 'headerBand' : 'footerBand'; if (channel !== 'facet' && !this.child.component.layoutSize.get(sizeType)) { - var _layoutMixins$bandTyp; - // If facet child does not have size signal, then apply headerBand - layoutMixins[bandType] = (_layoutMixins$bandTyp = layoutMixins[bandType]) !== null && _layoutMixins$bandTyp !== void 0 ? _layoutMixins$bandTyp : {}; + layoutMixins[bandType] ?? 
(layoutMixins[bandType] = {}); layoutMixins[bandType][channel] = 0.5; } if (layoutHeaderComponent.title) { - var _layoutMixins$offset; - - layoutMixins.offset = (_layoutMixins$offset = layoutMixins.offset) !== null && _layoutMixins$offset !== void 0 ? _layoutMixins$offset : {}; + layoutMixins.offset ?? (layoutMixins.offset = {}); layoutMixins.offset[channel === 'row' ? 'rowTitle' : 'columnTitle'] = 10; } } } } @@ -22600,15 +22597,19 @@ return undefined; } else { // In facetNode.assemble(), the name is always this.getName('column') + '_layout'. const facetLayoutDataName = this.getName('column_domain'); return { - signal: "length(data('".concat(facetLayoutDataName, "'))") + signal: `length(data('${facetLayoutDataName}'))` }; } } + assembleGroupStyle() { + return undefined; + } + assembleGroup(signals) { if (this.parent && this.parent instanceof FacetModel) { // Provide number of columns for layout. // See discussion in https://github.com/vega/vega/issues/952 // and https://github.com/vega/vega-view/releases/tag/v1.2.6 @@ -22644,11 +22645,11 @@ if (this.child instanceof FacetModel) { if (this.child.channelHasField('column')) { const field = vgField(this.child.facet.column); fields.push(field); ops.push('distinct'); - as.push("distinct_".concat(field)); + as.push(`distinct_${field}`); } } else { for (const channel of POSITION_SCALE_CHANNELS) { const childScaleComponent = this.child.component.scales[channel]; @@ -22661,11 +22662,11 @@ const field = getFieldFromDomain(domain); if (field) { fields.push(field); ops.push('distinct'); - as.push("distinct_".concat(field)); + as.push(`distinct_${field}`); } else { warn(unknownField(channel)); } } } @@ -22728,11 +22729,11 @@ } else { fields.push(field); ops.push(op); as.push(outputName); } - } else if (isArray(sort)) { + } else if (vegaUtil.isArray(sort)) { const outputName = sortArrayIndexField(fieldDef, channel); fields.push(outputName); ops.push('max'); as.push(outputName); } @@ -22767,11 +22768,11 @@ if (fieldDef) { if (isSortField(fieldDef.sort)) { return [facetSortFieldName(fieldDef, fieldDef.sort, { expr: 'datum' })]; - } else if (isArray(fieldDef.sort)) { + } else if (vegaUtil.isArray(fieldDef.sort)) { return [sortArrayIndexField(fieldDef, channel, { expr: 'datum' })]; } @@ -22791,11 +22792,11 @@ if (fieldDef) { const { sort } = fieldDef; - const order = (isSortField(sort) ? sort.order : !isArray(sort) && sort) || 'ascending'; + const order = (isSortField(sort) ? sort.order : !vegaUtil.isArray(sort) && sort) || 'ascending'; return [order]; } return []; } @@ -22820,11 +22821,11 @@ if (facet[channel]) { var _facet$channel; const labelOrient = getHeaderProperty('labelOrient', (_facet$channel = facet[channel]) === null || _facet$channel === void 0 ? void 0 : _facet$channel.header, config, channel); - if (contains(ORTHOGONAL_ORIENT[channel], labelOrient)) { + if (ORTHOGONAL_ORIENT[channel].includes(labelOrient)) { // Row/Column with orthogonal labelOrient must use title to display labels return assembleLabelTitle(facet[channel], channel, config); } } } @@ -23010,14 +23011,12 @@ if (isCalculate(t)) { transformNode = head = new CalculateNode(head, t); derivedType = 'derived'; } else if (isFilter(t)) { - var _ParseNode$makeWithAn; - const implicit = getImplicitFromFilterTransform(t); - transformNode = head = (_ParseNode$makeWithAn = ParseNode.makeWithAncestors(head, {}, implicit, ancestorParse)) !== null && _ParseNode$makeWithAn !== void 0 ? 
_ParseNode$makeWithAn : head; + transformNode = head = ParseNode.makeWithAncestors(head, {}, implicit, ancestorParse) ?? head; head = new FilterNode(head, model, t.filter); } else if (isBin(t)) { transformNode = head = BinNode.makeFromTransform(head, t, model); derivedType = 'number'; } else if (isTimeUnit(t)) { @@ -23030,11 +23029,11 @@ }); ancestorParse.set(t.field, derivedType, false); } transformNode = head = TimeUnitNode.makeFromTransform(head, t); - } else if (isAggregate$1(t)) { + } else if (isAggregate(t)) { transformNode = head = AggregateNode.makeFromTransform(head, t); derivedType = 'number'; if (requiresSelectionId(model)) { head = new IdentifierNode(head); @@ -23081,13 +23080,11 @@ warn(invalidTransformIgnored(t)); continue; } if (transformNode && derivedType !== undefined) { - for (const field of (_transformNode$produc = transformNode.producedFields()) !== null && _transformNode$produc !== void 0 ? _transformNode$produc : []) { - var _transformNode$produc; - + for (const field of transformNode.producedFields() ?? []) { ancestorParse.set(field, derivedType, false); } } } @@ -23147,19 +23144,20 @@ v ...Child data... */ function parseData(model) { - var _data$format3, _ParseNode$makeExplic, _ParseNode$makeWithAn2; + var _data$format3; let head = parseRoot(model, model.component.data.sources); const { outputNodes, outputNodeRefCounts } = model.component.data; - const ancestorParse = model.parent ? model.parent.component.data.ancestorParse.clone() : new AncestorParse(); const data = model.data; + const newData = data && (isGenerator(data) || isUrlData(data) || isInlineData(data)); + const ancestorParse = !newData && model.parent ? model.parent.component.data.ancestorParse.clone() : new AncestorParse(); if (isGenerator(data)) { // insert generator transform if (isSequenceGenerator(data)) { head = new SequenceNode(head, data.sequence); @@ -23172,11 +23170,11 @@ } else if ((data === null || data === void 0 ? void 0 : (_data$format3 = data.format) === null || _data$format3 === void 0 ? void 0 : _data$format3.parse) === null) { // format.parse: null means disable parsing ancestorParse.parseNothing = true; } - head = (_ParseNode$makeExplic = ParseNode.makeExplicit(head, model, ancestorParse)) !== null && _ParseNode$makeExplic !== void 0 ? _ParseNode$makeExplic : head; // Default discrete selections require an identifer transform to + head = ParseNode.makeExplicit(head, model, ancestorParse) ?? head; // Default discrete selections require an identifer transform to // uniquely identify data points. Add this transform at the head of // the pipeline such that the identifier field is available for all // subsequent datasets. During optimization, we will remove this // transform if it proves to be unnecessary. Additional identifier // transforms will be necessary when new tuples are constructed @@ -23187,72 +23185,62 @@ const parentIsLayer = model.parent && isLayerModel(model.parent); if (isUnitModel(model) || isFacetModel(model)) { if (parentIsLayer) { - var _BinNode$makeFromEnco; - - head = (_BinNode$makeFromEnco = BinNode.makeFromEncoding(head, model)) !== null && _BinNode$makeFromEnco !== void 0 ? _BinNode$makeFromEnco : head; + head = BinNode.makeFromEncoding(head, model) ?? 
head; } } if (model.transforms.length > 0) { head = parseTransformArray(head, model, ancestorParse); } // create parse nodes for fields that need to be parsed (or flattened) implicitly const implicitSelection = getImplicitFromSelection(model); const implicitEncoding = getImplicitFromEncoding(model); - head = (_ParseNode$makeWithAn2 = ParseNode.makeWithAncestors(head, {}, { ...implicitSelection, + head = ParseNode.makeWithAncestors(head, {}, { ...implicitSelection, ...implicitEncoding - }, ancestorParse)) !== null && _ParseNode$makeWithAn2 !== void 0 ? _ParseNode$makeWithAn2 : head; + }, ancestorParse) ?? head; if (isUnitModel(model)) { head = GeoJSONNode.parseAll(head, model); head = GeoPointNode.parseAll(head, model); } if (isUnitModel(model) || isFacetModel(model)) { - var _TimeUnitNode$makeFro; - if (!parentIsLayer) { - var _BinNode$makeFromEnco2; - - head = (_BinNode$makeFromEnco2 = BinNode.makeFromEncoding(head, model)) !== null && _BinNode$makeFromEnco2 !== void 0 ? _BinNode$makeFromEnco2 : head; + head = BinNode.makeFromEncoding(head, model) ?? head; } - head = (_TimeUnitNode$makeFro = TimeUnitNode.makeFromEncoding(head, model)) !== null && _TimeUnitNode$makeFro !== void 0 ? _TimeUnitNode$makeFro : head; + head = TimeUnitNode.makeFromEncoding(head, model) ?? head; head = CalculateNode.parseAllForSortIndex(head, model); } // add an output node pre aggregation const rawName = model.getDataName(DataSourceType.Raw); const raw = new OutputNode(head, rawName, DataSourceType.Raw, outputNodeRefCounts); outputNodes[rawName] = raw; head = raw; if (isUnitModel(model)) { - var _ImputeNode$makeFromE, _StackNode$makeFromEn; - const agg = AggregateNode.makeFromEncoding(head, model); if (agg) { head = agg; if (requiresSelectionId(model)) { head = new IdentifierNode(head); } } - head = (_ImputeNode$makeFromE = ImputeNode.makeFromEncoding(head, model)) !== null && _ImputeNode$makeFromE !== void 0 ? _ImputeNode$makeFromE : head; - head = (_StackNode$makeFromEn = StackNode.makeFromEncoding(head, model)) !== null && _StackNode$makeFromEn !== void 0 ? _StackNode$makeFromEn : head; + head = ImputeNode.makeFromEncoding(head, model) ?? head; + head = StackNode.makeFromEncoding(head, model) ?? head; } if (isUnitModel(model)) { - var _FilterInvalidNode$ma; - - head = (_FilterInvalidNode$ma = FilterInvalidNode.make(head, model)) !== null && _FilterInvalidNode$ma !== void 0 ? _FilterInvalidNode$ma : head; + head = FilterInvalidNode.make(head, model) ?? head; } // output node for marks const mainName = model.getDataName(DataSourceType.Main); const main = new OutputNode(head, mainName, DataSourceType.Main, outputNodeRefCounts); @@ -23265,16 +23253,14 @@ let facetRoot = null; if (isFacetModel(model)) { - var _makeJoinAggregateFro; - const facetName = model.getName('facet'); // Derive new aggregate for facet's sort field // augment data source with new fields for crossed facet - head = (_makeJoinAggregateFro = makeJoinAggregateFromFacet(head, model.facet)) !== null && _makeJoinAggregateFro !== void 0 ? _makeJoinAggregateFro : head; + head = makeJoinAggregateFromFacet(head, model.facet) ?? head; facetRoot = new FacetNode(head, model, facetName, main.getSource()); outputNodes[facetName] = facetRoot; } return { ...model.component.data, @@ -23298,11 +23284,11 @@ if (((_spec$resolve = spec.resolve) === null || _spec$resolve === void 0 ? void 0 : (_spec$resolve$axis = _spec$resolve.axis) === null || _spec$resolve$axis === void 0 ? 
void 0 : _spec$resolve$axis.x) === 'shared' || ((_spec$resolve2 = spec.resolve) === null || _spec$resolve2 === void 0 ? void 0 : (_spec$resolve2$axis = _spec$resolve2.axis) === null || _spec$resolve2$axis === void 0 ? void 0 : _spec$resolve2$axis.y) === 'shared') { warn(CONCAT_CANNOT_SHARE_AXIS); } this.children = this.getChildren(spec).map((child, i) => { - return buildModel(child, this, this.getName('concat_' + i), undefined, config); + return buildModel(child, this, this.getName(`concat_${i}`), undefined, config); }); } parseData() { this.component.data = parseData(this); @@ -23404,10 +23390,14 @@ ...child.assembleGroup() }; }); } + assembleGroupStyle() { + return undefined; + } + assembleDefaultLayout() { const columns = this.layout.columns; return { ...(columns != null ? { columns: columns } : {}), @@ -23463,18 +23453,16 @@ return isSignalRef(this.explicit.orient); } } - function labels$1(model, channel, specifiedLabelsSpec) { - var _getFieldOrDatumDef; - + function labels(model, channel, specifiedLabelsSpec) { const { encoding, config } = model; - const fieldOrDatumDef = (_getFieldOrDatumDef = getFieldOrDatumDef(encoding[channel])) !== null && _getFieldOrDatumDef !== void 0 ? _getFieldOrDatumDef : getFieldOrDatumDef(encoding[getSecondaryRangeChannel(channel)]); + const fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]) ?? getFieldOrDatumDef(encoding[getSecondaryRangeChannel(channel)]); const axis = model.axis(channel) || {}; const { format, formatType } = axis; @@ -23550,14 +23538,12 @@ // skip if the child does not have a particular axis continue; } if (resolve.axis[channel] === 'independent') { - var _axes$channel; - // If axes are independent, concat the axisComponent array. - axes[channel] = ((_axes$channel = axes[channel]) !== null && _axes$channel !== void 0 ? _axes$channel : []).concat(child.component.axes[channel]); // Automatically adjust orient + axes[channel] = (axes[channel] ?? []).concat(child.component.axes[channel]); // Automatically adjust orient for (const axisComponent of child.component.axes[channel]) { const { value: orient, explicit @@ -23651,16 +23637,15 @@ }); merged.setWithExplicit(prop, mergedValueWithExplicit); } return merged; - } // eslint-disable-next-line @typescript-eslint/ban-types + } - - function isExplicit$1(value, property, axis, model, channel) { + function isExplicit(value, property, axis, model, channel) { if (property === 'disable') { - return axis !== undefined; // if axis is specified or null/false, then it's enable/disable state is explicit + return axis !== undefined; // if axis is specified or null/false, then its enable/disable state is explicit } axis = axis || {}; switch (property) { @@ -23696,11 +23681,11 @@ 'translate', // translate has dependent logic for bar's bin position and it's 0.5 by default in Vega. If a config overrides this value, we need to know. // the rest are not axis configs in Vega, but are in VL, so we need to set too. 'format', 'formatType', 'orient', 'labelExpr', 'tickCount', 'position', 'tickMinStep']); function parseAxis(channel, model) { - var _axis, _config, _config$axis, _axis2, _axis$encoding; + var _axis, _config, _config$axis, _axis2; let axis = model.axis(channel); const axisComponent = new AxisComponent(); const fieldOrDatumDef = getFieldOrDatumDef(model.encoding[channel]); const { @@ -23732,11 +23717,11 @@ }; // 1.2. Add properties for (const property of AXIS_COMPONENT_PROPERTIES) { const value = property in axisRules ? axisRules[property](ruleParams) : isAxisProperty(property) ? 
axis[property] : undefined; const hasValue = value !== undefined; - const explicit = isExplicit$1(value, property, axis, model, channel); + const explicit = isExplicit(value, property, axis, model, channel); if (hasValue && explicit) { axisComponent.set(property, value, explicit); } else { const { @@ -23748,31 +23733,28 @@ if (hasValue && !hasConfigValue) { // only set property if it is explicitly set or has no config value (otherwise we will accidentally override config) axisComponent.set(property, value, explicit); } else if ( // Cases need implicit values // 1. Axis config that aren't available in Vega - !(configFrom === 'vgAxisConfig') || // 2. Certain properties are always included (see `propsToAlwaysIncludeConfig`'s declaration for more details) - propsToAlwaysIncludeConfig.has(property) && hasConfigValue || // 3. Conditional axis values and signals + !(configFrom === 'vgAxisConfig') || propsToAlwaysIncludeConfig.has(property) && hasConfigValue || // 3. Conditional axis values and signals isConditionalAxisValue(configValue) || isSignalRef(configValue)) { // If a config is specified and is conditional, copy conditional value from axis config axisComponent.set(property, configValue, false); } } } // 2) Add guide encode definition groups - const axisEncoding = (_axis$encoding = axis.encoding) !== null && _axis$encoding !== void 0 ? _axis$encoding : {}; + const axisEncoding = axis.encoding ?? {}; const axisEncode = AXIS_PARTS.reduce((e, part) => { - var _axisEncoding$part; - if (!axisComponent.hasAxisPart(part)) { // No need to create encode for a disabled part. return e; } - const axisEncodingPart = guideEncodeEntry((_axisEncoding$part = axisEncoding[part]) !== null && _axisEncoding$part !== void 0 ? _axisEncoding$part : {}, model); - const value = part === 'labels' ? labels$1(model, channel, axisEncodingPart) : axisEncodingPart; + const axisEncodingPart = guideEncodeEntry(axisEncoding[part] ?? {}, model); + const value = part === 'labels' ? labels(model, channel, axisEncodingPart) : axisEncodingPart; if (value !== undefined && !isEmpty(value)) { e[part] = { update: value }; @@ -23791,11 +23773,11 @@ function initLayoutSize({ encoding, size }) { for (const channel of POSITION_SCALE_CHANNELS) { - const sizeType = getSizeChannel(channel); + const sizeType = getSizeChannel$1(channel); if (isStep(size[sizeType])) { if (isContinuousFieldOrDatumDef(encoding[channel])) { delete size[sizeType]; warn(stepDropped(sizeType)); @@ -23805,11 +23787,12 @@ return size; } function initMarkdef(originalMarkDef, encoding, config) { - const markDef = replaceExprRefInIndex(originalMarkDef); // set orient, which can be overridden by rules as sometimes the specified orient is invalid. + // FIXME: markDef expects that exprRefs are replaced recursively but replaceExprRef only replaces the top level + const markDef = replaceExprRef(originalMarkDef); // set orient, which can be overridden by rules as sometimes the specified orient is invalid. 
const specifiedOrient = getMarkPropOrConfig('orient', markDef, config); markDef.orient = orient(markDef.type, encoding, specifiedOrient); if (specifiedOrient !== undefined && specifiedOrient !== markDef.orient) { @@ -23858,11 +23841,11 @@ } function opacity(mark, encoding) { if (contains([POINT, TICK, CIRCLE, SQUARE], mark)) { // point-based marks - if (!isAggregate(encoding)) { + if (!isAggregate$1(encoding)) { return 0.7; } } return undefined; @@ -23883,11 +23866,11 @@ function orient(mark, encoding, specifiedOrient) { switch (mark) { case POINT: case CIRCLE: case SQUARE: - case TEXT$1: + case TEXT: case RECT: case IMAGE: // orient is meaningless for these marks. return undefined; } @@ -23970,11 +23953,13 @@ { // Tick is opposite to bar, line, area and never have ranged mark. const xIsContinuous = isContinuousFieldOrDatumDef(x); const yIsContinuous = isContinuousFieldOrDatumDef(y); - if (xIsContinuous && !yIsContinuous) { + if (specifiedOrient) { + return specifiedOrient; + } else if (xIsContinuous && !yIsContinuous) { return mark !== 'tick' ? 'horizontal' : 'vertical'; } else if (!xIsContinuous && yIsContinuous) { return mark !== 'tick' ? 'vertical' : 'horizontal'; } else if (xIsContinuous && yIsContinuous) { const xDef = x; // we can cast here since they are surely fieldDef @@ -23993,23 +23978,12 @@ return mark !== 'tick' ? 'vertical' : 'horizontal'; } else if (xDef.aggregate && !yDef.aggregate) { return mark !== 'tick' ? 'horizontal' : 'vertical'; } - if (specifiedOrient) { - // When ambiguous, use user specified one. - return specifiedOrient; - } - return 'vertical'; } else { - // Discrete x Discrete case - if (specifiedOrient) { - // When ambiguous, use user specified one. - return specifiedOrient; - } - return undefined; } } } @@ -24126,11 +24100,11 @@ size: 'ignore', theta: 'ignore' }), ...rectPosition(model, 'x', 'image'), ...rectPosition(model, 'y', 'image'), - ...text(model, 'url') + ...text$1(model, 'url') }; } }; const line = { @@ -24291,11 +24265,11 @@ }) }; } }; - const text$1 = { + const text = { vgMark: 'text', encodeEntry: model => { const { config, encoding @@ -24312,25 +24286,23 @@ defaultPos: 'mid' }), ...pointPosition('y', model, { defaultPos: 'mid' }), - ...text(model), + ...text$1(model), ...nonPosition('size', model, { vgChannel: 'fontSize' // VL's text size is fontSize }), ...nonPosition('angle', model), ...valueIfDefined('align', align(model.markDef, encoding, config)), ...valueIfDefined('baseline', baseline(model.markDef, encoding, config)), ...pointPosition('radius', model, { - defaultPos: null, - isMidPoint: true + defaultPos: null }), ...pointPosition('theta', model, { - defaultPos: null, - isMidPoint: true + defaultPos: null }) }; } }; @@ -24391,31 +24363,29 @@ }; } }; function defaultSize(model) { - var _getMarkPropOrConfig; - const { config, markDef } = model; const { orient } = markDef; const vgSizeChannel = orient === 'horizontal' ? 'width' : 'height'; const scale = model.getScaleComponent(orient === 'horizontal' ? 'x' : 'y'); - const markPropOrConfig = (_getMarkPropOrConfig = getMarkPropOrConfig('size', markDef, config, { + const markPropOrConfig = getMarkPropOrConfig('size', markDef, config, { vgChannel: vgSizeChannel - })) !== null && _getMarkPropOrConfig !== void 0 ? _getMarkPropOrConfig : config.tick.bandSize; + }) ?? config.tick.bandSize; if (markPropOrConfig !== undefined) { return markPropOrConfig; } else { const scaleRange = scale ? 
scale.get('range') : undefined; - if (scaleRange && isVgRangeStep(scaleRange) && isNumber(scaleRange.step)) { + if (scaleRange && isVgRangeStep(scaleRange) && vegaUtil.isNumber(scaleRange.step)) { return scaleRange.step * 3 / 4; } const defaultViewStep = getViewConfigDiscreteStep(config.view, vgSizeChannel); return defaultViewStep * 3 / 4; @@ -24432,11 +24402,11 @@ line, point, rect, rule, square, - text: text$1, + text, tick, trail }; function parseMarkGroups(model) { if (contains([LINE, AREA, TRAIL], model.mark)) { @@ -24444,11 +24414,11 @@ if (details.length > 0) { return getPathGroups(model, details); } // otherwise use standard mark groups - } else if (contains([BAR], model.mark)) { + } else if (model.mark === BAR) { const hasCornerRadius = VG_CORNERRADIUS_CHANNELS.some(prop => getMarkPropOrConfig(prop, model.markDef, model.config)); if (model.stack && !model.fieldDef('size') && hasCornerRadius) { return getGroupsForStackedBarWithCornerRadius(model); } @@ -24525,11 +24495,11 @@ }), stackField({ prefix: 'max', suffix: 'end', expr })]; - return "".concat(func, "(").concat(vgFieldMinMax.map(field => "scale('".concat(fieldScale, "',").concat(field, ")")).join(','), ")"); + return `${func}(${vgFieldMinMax.map(field => `scale('${fieldScale}',${field})`).join(',')})`; }; let groupUpdate; let innerGroupUpdate; // Build the encoding for group and an inner group @@ -24619,20 +24589,28 @@ if (configValue) { mark.encode.update[key] = { value: 0 }; } - } // For bin and time unit, we have to add bin/timeunit -end channels. + } + const groupby = []; - const groupByField = model.fieldDef(model.stack.groupbyChannel); - const groupby = vgField(groupByField) ? [vgField(groupByField)] : []; + if (model.stack.groupbyChannel) { + // For bin and time unit, we have to add bin/timeunit -end channels. + const groupByField = model.fieldDef(model.stack.groupbyChannel); + const field = vgField(groupByField); - if ((groupByField === null || groupByField === void 0 ? void 0 : groupByField.bin) || (groupByField === null || groupByField === void 0 ? void 0 : groupByField.timeUnit)) { - groupby.push(vgField(groupByField, { - binSuffix: 'end' - })); + if (field) { + groupby.push(field); + } + + if (groupByField !== null && groupByField !== void 0 && groupByField.bin || groupByField !== null && groupByField !== void 0 && groupByField.timeUnit) { + groupby.push(vgField(groupByField, { + binSuffix: 'end' + })); + } } const strokeProperties = ['stroke', 'strokeWidth', 'strokeJoin', 'strokeCap', 'strokeDash', 'strokeDashOffset', 'strokeMiterLimit', 'strokeOpacity']; // Generate stroke properties for the group groupUpdate = strokeProperties.reduce((encode, prop) => { @@ -24694,23 +24672,23 @@ marks: [mark] }] }]; } - function getSort$1(model) { + function getSort(model) { const { encoding, stack, mark, markDef, config } = model; const order = encoding.order; - if (!isArray(order) && isValueDef(order) && isNullOrFalse(order.value) || !order && isNullOrFalse(getMarkPropOrConfig('order', markDef, config))) { + if (!vegaUtil.isArray(order) && isValueDef(order) && isNullOrFalse(order.value) || !order && isNullOrFalse(getMarkPropOrConfig('order', markDef, config))) { return undefined; - } else if ((isArray(order) || isFieldDef(order)) && !stack) { + } else if ((vegaUtil.isArray(order) || isFieldDef(order)) && !stack) { // Sort by the order field if it is specified and the field is not stacked. (For stacked field, order specify stack order.) 
return sortParams(order, { expr: 'datum' }); } else if (isPathMark(mark)) { @@ -24719,11 +24697,11 @@ const dimensionChannelDef = encoding[dimensionChannel]; if (isFieldDef(dimensionChannelDef)) { const s = dimensionChannelDef.sort; - if (isArray(s)) { + if (vegaUtil.isArray(s)) { return { field: vgField(dimensionChannelDef, { prefix: dimensionChannel, suffix: 'sort_index', expr: 'datum' @@ -24732,11 +24710,11 @@ } else if (isSortField(s)) { return { field: vgField({ // FIXME: this op might not already exist? // FIXME: what if dimensionChannel (x or y) contains custom domain? - aggregate: isAggregate(model.encoding) ? s.op : undefined, + aggregate: isAggregate$1(model.encoding) ? s.op : undefined, field: s.field }, { expr: 'datum' }) }; @@ -24749,14 +24727,16 @@ order: s.order }; } else if (s === null) { return undefined; } else { + var _model$stack; + return { field: vgField(dimensionChannelDef, { // For stack with imputation, we only have bin_mid - binSuffix: model.stack && model.stack.impute ? 'mid' : undefined, + binSuffix: (_model$stack = model.stack) !== null && _model$stack !== void 0 && _model$stack.impute ? 'mid' : undefined, expr: 'datum' }) }; } } @@ -24777,11 +24757,11 @@ config } = model; const clip = getFirstDefined(markDef.clip, scaleClip(model), projectionClip(model)); const style = getStyles(markDef); const key = encoding.key; - const sort = getSort$1(model); + const sort = getSort(model); const interactive = interactiveFlag(model); const aria = getMarkPropOrConfig('aria', markDef, config); const postEncodingTransform = markCompiler[mark].postEncodingTransform ? markCompiler[mark].postEncodingTransform(model) : null; return [{ name: model.getName('marks'), @@ -24876,11 +24856,11 @@ _defineProperty(this, "specifiedLegends", {}); _defineProperty(this, "specifiedProjection", {}); - _defineProperty(this, "selection", {}); + _defineProperty(this, "selection", []); _defineProperty(this, "children", []); const markDef = isMarkDef(spec.mark) ? { ...spec.mark } : { @@ -24912,11 +24892,11 @@ this.specifiedScales = this.initScales(mark, encoding); this.specifiedAxes = this.initAxes(encoding); this.specifiedLegends = this.initLegends(encoding); this.specifiedProjection = spec.projection; // Selections will be initialized upon parse. - this.selection = spec.selection; + this.selection = (spec.params ?? []).filter(p => isSelectionParameter(p)); } get hasProjection() { const { encoding @@ -24947,31 +24927,30 @@ initScales(mark, encoding) { return SCALE_CHANNELS.reduce((scales, channel) => { const fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]); if (fieldOrDatumDef) { - var _fieldOrDatumDef$scal; - - scales[channel] = this.initScale((_fieldOrDatumDef$scal = fieldOrDatumDef.scale) !== null && _fieldOrDatumDef$scal !== void 0 ? _fieldOrDatumDef$scal : {}); + scales[channel] = this.initScale(fieldOrDatumDef.scale ?? 
{}); } return scales; }, {}); } initScale(scale) { const { domain, range - } = scale; - const scaleInternal = replaceExprRefInIndex(scale); + } = scale; // TODO: we could simplify this function if we had a recursive replace function - if (isArray(domain)) { + const scaleInternal = replaceExprRef(scale); + + if (vegaUtil.isArray(domain)) { scaleInternal.domain = domain.map(signalRefOrValue); } - if (isArray(range)) { + if (vegaUtil.isArray(range)) { scaleInternal.range = range.map(signalRefOrValue); } return scaleInternal; } @@ -25009,11 +24988,11 @@ return NONPOSITION_SCALE_CHANNELS.reduce((_legend, channel) => { const fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]); if (fieldOrDatumDef && supportLegend(channel)) { const legend = fieldOrDatumDef.legend; - _legend[channel] = legend ? replaceExprRefInIndex(legend) // convert truthy value to object + _legend[channel] = legend ? replaceExprRef(legend) // convert truthy value to object : legend; } return _legend; }, {}); @@ -25058,23 +25037,37 @@ assembleLayoutSignals() { return assembleLayoutSignals(this); } assembleMarks() { - var _this$component$mark; - - let marks = (_this$component$mark = this.component.mark) !== null && _this$component$mark !== void 0 ? _this$component$mark : []; // If this unit is part of a layer, selections should augment + let marks = this.component.mark ?? []; // If this unit is part of a layer, selections should augment // all in concert rather than each unit individually. This // ensures correct interleaving of clipping and brushed marks. if (!this.parent || !isLayerModel(this.parent)) { marks = assembleUnitSelectionMarks(this, marks); } return marks.map(this.correctDataNames); } + assembleGroupStyle() { + const { + style + } = this.view || {}; + + if (style !== undefined) { + return style; + } + + if (this.encoding.x || this.encoding.y) { + return 'cell'; + } else { + return undefined; + } + } + getMapping() { return this.encoding; } get mark() { @@ -25118,13 +25111,13 @@ height: spec.height } : {}) }; this.children = spec.layer.map((layer, i) => { if (isLayerSpec(layer)) { - return new LayerModel(layer, this, this.getName('layer_' + i), layoutSize, config); + return new LayerModel(layer, this, this.getName(`layer_${i}`), layoutSize, config); } else if (isUnitSpec(layer)) { - return new UnitModel(layer, this, this.getName('layer_' + i), layoutSize, config); + return new UnitModel(layer, this, this.getName(`layer_${i}`), layoutSize, config); } throw new Error(invalidSpec(layer)); }); } @@ -25185,10 +25178,23 @@ assembleSelectionData(data) { return this.children.reduce((db, child) => child.assembleSelectionData(db), data); } + assembleGroupStyle() { + const uniqueStyles = new Set(); + + for (const child of this.children) { + for (const style of vegaUtil.array(child.assembleGroupStyle())) { + uniqueStyles.add(style); + } + } + + const styles = Array.from(uniqueStyles); + return styles.length > 1 ? styles : styles.length === 1 ? styles[0] : undefined; + } + assembleTitle() { let title = super.assembleTitle(); if (title) { return title; @@ -25281,11 +25287,11 @@ setTitleFormatter(opt.fieldTitle); } try { // 1. Initialize config by deep merging default config with the config provided via option and the input spec. - const config = initConfig(mergeConfig(opt.config, inputSpec.config)); // 2. Normalize: Convert input spec -> normalized spec + const config = initConfig(vegaUtil.mergeConfig(opt.config, inputSpec.config)); // 2. 
Normalize: Convert input spec -> normalized spec // - Decompose all extended unit specs into composition of unit spec. For example, a box plot get expanded into multiple layers of bars, ticks, and rules. The shorthand row/column channel is also expanded to a facet spec. // - Normalize autosize and width or height spec const spec = normalize(inputSpec, config); // 3. Build Model: normalized spec -> Model (a tree structure) // This phases instantiates the models with default config by doing a top-down traversal. This allows us to pass properties that child models derive from their parents via their constructors. @@ -25336,11 +25342,11 @@ }; if (model.hasAxisOrientSignalRef()) { autosize.resize = true; } - } else if (isString(autosize)) { + } else if (vegaUtil.isString(autosize)) { autosize = { type: autosize }; } @@ -25429,25 +25435,27 @@ usermeta } : {}) }; } + const version = pkg.version; + exports.accessPathDepth = accessPathDepth; exports.accessPathWithDatum = accessPathWithDatum; exports.compile = compile; exports.contains = contains; exports.deepEqual = deepEqual; exports.deleteNestedProperty = deleteNestedProperty; exports.duplicate = duplicate; - exports.entries = entries; + exports.entries = entries$1; exports.every = every; exports.fieldIntersection = fieldIntersection; exports.flatAccessWithDatum = flatAccessWithDatum; exports.getFirstDefined = getFirstDefined; exports.hasIntersection = hasIntersection; exports.hash = hash; exports.internalField = internalField; - exports.isBoolean = isBoolean$1; + exports.isBoolean = isBoolean; exports.isEmpty = isEmpty; exports.isEqual = isEqual; exports.isInternalField = isInternalField; exports.isNullOrFalse = isNullOrFalse; exports.isNumeric = isNumeric;