Gradually complete the front-end and back-end servers
frontend/node_modules/echarts/lib/data/helper/SeriesDataSchema.js (generated, vendored, new file, 224 lines)
@@ -0,0 +1,224 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
*   http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
 * AUTO-GENERATED FILE. DO NOT MODIFY.
 */
import { createHashMap, isObject, retrieve2 } from 'zrender/lib/core/util.js';
import { makeInner } from '../../util/model.js';
import { shouldRetrieveDataByName } from '../Source.js';
var inner = makeInner();
var dimTypeShort = {
  float: 'f',
  int: 'i',
  ordinal: 'o',
  number: 'n',
  time: 't'
};
/**
 * Represents the dimension requirement of a series.
 *
 * NOTICE:
 * When there are too many dimensions in dataset and many series, only the used dimensions
 * (i.e., used by coord sys and declared in `series.encode`) are add to `dimensionDefineList`.
 * But users may query data by other unused dimension names.
 * In this case, users can only query data if and only if they have defined dimension names
 * via ec option, so we provide `getDimensionIndexFromSource`, which only query them from
 * `source` dimensions.
 */
var SeriesDataSchema = /** @class */function () {
  function SeriesDataSchema(opt) {
    this.dimensions = opt.dimensions;
    this._dimOmitted = opt.dimensionOmitted;
    this.source = opt.source;
    this._fullDimCount = opt.fullDimensionCount;
    this._updateDimOmitted(opt.dimensionOmitted);
  }
  SeriesDataSchema.prototype.isDimensionOmitted = function () {
    return this._dimOmitted;
  };
  SeriesDataSchema.prototype._updateDimOmitted = function (dimensionOmitted) {
    this._dimOmitted = dimensionOmitted;
    if (!dimensionOmitted) {
      return;
    }
    if (!this._dimNameMap) {
      this._dimNameMap = ensureSourceDimNameMap(this.source);
    }
  };
  /**
   * @caution Can only be used when `dimensionOmitted: true`.
   *
   * Get index by user defined dimension name (i.e., not internal generate name).
   * That is, get index from `dimensionsDefine`.
   * If no `dimensionsDefine`, or no name get, return -1.
   */
  SeriesDataSchema.prototype.getSourceDimensionIndex = function (dimName) {
    return retrieve2(this._dimNameMap.get(dimName), -1);
  };
  /**
   * @caution Can only be used when `dimensionOmitted: true`.
   *
   * Notice: may return `null`/`undefined` if user not specify dimension names.
   */
  SeriesDataSchema.prototype.getSourceDimension = function (dimIndex) {
    var dimensionsDefine = this.source.dimensionsDefine;
    if (dimensionsDefine) {
      return dimensionsDefine[dimIndex];
    }
  };
  SeriesDataSchema.prototype.makeStoreSchema = function () {
    var dimCount = this._fullDimCount;
    var willRetrieveDataByName = shouldRetrieveDataByName(this.source);
    var makeHashStrict = !shouldOmitUnusedDimensions(dimCount);
    // If source don't have dimensions or series don't omit unsed dimensions.
    // Generate from seriesDimList directly
    var dimHash = '';
    var dims = [];
    for (var fullDimIdx = 0, seriesDimIdx = 0; fullDimIdx < dimCount; fullDimIdx++) {
      var property = void 0;
      var type = void 0;
      var ordinalMeta = void 0;
      var seriesDimDef = this.dimensions[seriesDimIdx];
      // The list has been sorted by `storeDimIndex` asc.
      if (seriesDimDef && seriesDimDef.storeDimIndex === fullDimIdx) {
        property = willRetrieveDataByName ? seriesDimDef.name : null;
        type = seriesDimDef.type;
        ordinalMeta = seriesDimDef.ordinalMeta;
        seriesDimIdx++;
      } else {
        var sourceDimDef = this.getSourceDimension(fullDimIdx);
        if (sourceDimDef) {
          property = willRetrieveDataByName ? sourceDimDef.name : null;
          type = sourceDimDef.type;
        }
      }
      dims.push({
        property: property,
        type: type,
        ordinalMeta: ordinalMeta
      });
      // If retrieving data by index,
      //   use <index, type, ordinalMeta> to determine whether data can be shared.
      //   (Because in this case there might be no dimension name defined in dataset, but indices always exists).
      //   (Indices are always 0, 1, 2, ..., so we can ignore them to shorten the hash).
      // Otherwise if retrieving data by property name (like `data: [{aa: 123, bb: 765}, ...]`),
      //   use <property, type, ordinalMeta> in hash.
      if (willRetrieveDataByName && property != null
      // For data stack, we have make sure each series has its own dim on this store.
      // So we do not add property to hash to make sure they can share this store.
      && (!seriesDimDef || !seriesDimDef.isCalculationCoord)) {
        dimHash += makeHashStrict
        // Use escape character '`' in case that property name contains '$'.
        ? property.replace(/\`/g, '`1').replace(/\$/g, '`2')
        // For better performance, when there are large dimensions, tolerant this defects that hardly meet.
        : property;
      }
      dimHash += '$';
      dimHash += dimTypeShort[type] || 'f';
      if (ordinalMeta) {
        dimHash += ordinalMeta.uid;
      }
      dimHash += '$';
    }
    // Source from endpoint(usually series) will be read differently
    // when seriesLayoutBy or startIndex(which is affected by sourceHeader) are different.
    // So we use this three props as key.
    var source = this.source;
    var hash = [source.seriesLayoutBy, source.startIndex, dimHash].join('$$');
    return {
      dimensions: dims,
      hash: hash
    };
  };
  SeriesDataSchema.prototype.makeOutputDimensionNames = function () {
    var result = [];
    for (var fullDimIdx = 0, seriesDimIdx = 0; fullDimIdx < this._fullDimCount; fullDimIdx++) {
      var name_1 = void 0;
      var seriesDimDef = this.dimensions[seriesDimIdx];
      // The list has been sorted by `storeDimIndex` asc.
      if (seriesDimDef && seriesDimDef.storeDimIndex === fullDimIdx) {
        if (!seriesDimDef.isCalculationCoord) {
          name_1 = seriesDimDef.name;
        }
        seriesDimIdx++;
      } else {
        var sourceDimDef = this.getSourceDimension(fullDimIdx);
        if (sourceDimDef) {
          name_1 = sourceDimDef.name;
        }
      }
      result.push(name_1);
    }
    return result;
  };
  SeriesDataSchema.prototype.appendCalculationDimension = function (dimDef) {
    this.dimensions.push(dimDef);
    dimDef.isCalculationCoord = true;
    this._fullDimCount++;
    // If append dimension on a data store, consider the store
    // might be shared by different series, series dimensions not
    // really map to store dimensions.
    this._updateDimOmitted(true);
  };
  return SeriesDataSchema;
}();
export { SeriesDataSchema };
export function isSeriesDataSchema(schema) {
  return schema instanceof SeriesDataSchema;
}
export function createDimNameMap(dimsDef) {
  var dataDimNameMap = createHashMap();
  for (var i = 0; i < (dimsDef || []).length; i++) {
    var dimDefItemRaw = dimsDef[i];
    var userDimName = isObject(dimDefItemRaw) ? dimDefItemRaw.name : dimDefItemRaw;
    if (userDimName != null && dataDimNameMap.get(userDimName) == null) {
      dataDimNameMap.set(userDimName, i);
    }
  }
  return dataDimNameMap;
}
export function ensureSourceDimNameMap(source) {
  var innerSource = inner(source);
  return innerSource.dimNameMap || (innerSource.dimNameMap = createDimNameMap(source.dimensionsDefine));
}
export function shouldOmitUnusedDimensions(dimCount) {
  return dimCount > 30;
}
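The store-schema hash built by makeStoreSchema() above is what lets different series decide whether they can share one underlying data store. A minimal sketch of that comparison, assuming schemaA and schemaB are SeriesDataSchema instances obtained elsewhere; the helper name canShareStore is illustrative and not part of this file:

function canShareStore(schemaA, schemaB) {
  // Hash layout (see makeStoreSchema above):
  //   seriesLayoutBy $$ startIndex $$ <property?>$<typeShort><ordinalMeta.uid?>$ ...
  return schemaA.makeStoreSchema().hash === schemaB.makeStoreSchema().hash;
}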
frontend/node_modules/echarts/lib/data/helper/createDimensions.js (generated, vendored, new file, 322 lines)
@@ -0,0 +1,322 @@
/**
 * AUTO-GENERATED FILE. DO NOT MODIFY.
 */
import { VISUAL_DIMENSIONS } from '../../util/types.js';
|
||||
import SeriesDimensionDefine from '../SeriesDimensionDefine.js';
|
||||
import { createHashMap, defaults, each, extend, isObject, isString } from 'zrender/lib/core/util.js';
|
||||
import { createSourceFromSeriesDataOption, isSourceInstance } from '../Source.js';
|
||||
import { CtorInt32Array } from '../DataStore.js';
|
||||
import { normalizeToArray } from '../../util/model.js';
|
||||
import { BE_ORDINAL, guessOrdinal } from './sourceHelper.js';
|
||||
import { createDimNameMap, ensureSourceDimNameMap, SeriesDataSchema, shouldOmitUnusedDimensions } from './SeriesDataSchema.js';
|
||||
/**
|
||||
* For outside usage compat (like echarts-gl are using it).
|
||||
*/
|
||||
export function createDimensions(source, opt) {
|
||||
return prepareSeriesDataSchema(source, opt).dimensions;
|
||||
}
|
||||
/**
|
||||
* This method builds the relationship between:
|
||||
* + "what the coord sys or series requires (see `coordDimensions`)",
|
||||
* + "what the user defines (in `encode` and `dimensions`, see `opt.dimensionsDefine` and `opt.encodeDefine`)"
|
||||
* + "what the data source provids (see `source`)".
|
||||
*
|
||||
* Some guess strategy will be adapted if user does not define something.
|
||||
* If no 'value' dimension specified, the first no-named dimension will be
|
||||
* named as 'value'.
|
||||
*
|
||||
* @return The results are always sorted by `storeDimIndex` asc.
|
||||
*/
|
||||
export default function prepareSeriesDataSchema(
|
||||
// TODO: TYPE completeDimensions type
|
||||
source, opt) {
|
||||
if (!isSourceInstance(source)) {
|
||||
source = createSourceFromSeriesDataOption(source);
|
||||
}
|
||||
opt = opt || {};
|
||||
var sysDims = opt.coordDimensions || [];
|
||||
var dimsDef = opt.dimensionsDefine || source.dimensionsDefine || [];
|
||||
var coordDimNameMap = createHashMap();
|
||||
var resultList = [];
|
||||
var dimCount = getDimCount(source, sysDims, dimsDef, opt.dimensionsCount);
|
||||
// Try to ignore unused dimensions if sharing a high dimension datastore
|
||||
// 30 is an experience value.
|
||||
var omitUnusedDimensions = opt.canOmitUnusedDimensions && shouldOmitUnusedDimensions(dimCount);
|
||||
var isUsingSourceDimensionsDef = dimsDef === source.dimensionsDefine;
|
||||
var dataDimNameMap = isUsingSourceDimensionsDef ? ensureSourceDimNameMap(source) : createDimNameMap(dimsDef);
|
||||
var encodeDef = opt.encodeDefine;
|
||||
if (!encodeDef && opt.encodeDefaulter) {
|
||||
encodeDef = opt.encodeDefaulter(source, dimCount);
|
||||
}
|
||||
var encodeDefMap = createHashMap(encodeDef);
|
||||
var indicesMap = new CtorInt32Array(dimCount);
|
||||
for (var i = 0; i < indicesMap.length; i++) {
|
||||
indicesMap[i] = -1;
|
||||
}
|
||||
function getResultItem(dimIdx) {
|
||||
var idx = indicesMap[dimIdx];
|
||||
if (idx < 0) {
|
||||
var dimDefItemRaw = dimsDef[dimIdx];
|
||||
var dimDefItem = isObject(dimDefItemRaw) ? dimDefItemRaw : {
|
||||
name: dimDefItemRaw
|
||||
};
|
||||
var resultItem = new SeriesDimensionDefine();
|
||||
var userDimName = dimDefItem.name;
|
||||
if (userDimName != null && dataDimNameMap.get(userDimName) != null) {
|
||||
// Only if `series.dimensions` is defined in option
|
||||
// displayName, will be set, and dimension will be displayed vertically in
|
||||
// tooltip by default.
|
||||
resultItem.name = resultItem.displayName = userDimName;
|
||||
}
|
||||
dimDefItem.type != null && (resultItem.type = dimDefItem.type);
|
||||
dimDefItem.displayName != null && (resultItem.displayName = dimDefItem.displayName);
|
||||
var newIdx = resultList.length;
|
||||
indicesMap[dimIdx] = newIdx;
|
||||
resultItem.storeDimIndex = dimIdx;
|
||||
resultList.push(resultItem);
|
||||
return resultItem;
|
||||
}
|
||||
return resultList[idx];
|
||||
}
|
||||
if (!omitUnusedDimensions) {
|
||||
for (var i = 0; i < dimCount; i++) {
|
||||
getResultItem(i);
|
||||
}
|
||||
}
|
||||
// Set `coordDim` and `coordDimIndex` by `encodeDefMap` and normalize `encodeDefMap`.
|
||||
encodeDefMap.each(function (dataDimsRaw, coordDim) {
|
||||
var dataDims = normalizeToArray(dataDimsRaw).slice();
|
||||
// Note: It is allowed that `dataDims.length` is `0`, e.g., options is
|
||||
// `{encode: {x: -1, y: 1}}`. Should not filter anything in
|
||||
// this case.
|
||||
if (dataDims.length === 1 && !isString(dataDims[0]) && dataDims[0] < 0) {
|
||||
encodeDefMap.set(coordDim, false);
|
||||
return;
|
||||
}
|
||||
var validDataDims = encodeDefMap.set(coordDim, []);
|
||||
each(dataDims, function (resultDimIdxOrName, idx) {
|
||||
// The input resultDimIdx can be dim name or index.
|
||||
var resultDimIdx = isString(resultDimIdxOrName) ? dataDimNameMap.get(resultDimIdxOrName) : resultDimIdxOrName;
|
||||
if (resultDimIdx != null && resultDimIdx < dimCount) {
|
||||
validDataDims[idx] = resultDimIdx;
|
||||
applyDim(getResultItem(resultDimIdx), coordDim, idx);
|
||||
}
|
||||
});
|
||||
});
|
||||
// Apply templates and default order from `sysDims`.
|
||||
var availDimIdx = 0;
|
||||
each(sysDims, function (sysDimItemRaw) {
|
||||
var coordDim;
|
||||
var sysDimItemDimsDef;
|
||||
var sysDimItemOtherDims;
|
||||
var sysDimItem;
|
||||
if (isString(sysDimItemRaw)) {
|
||||
coordDim = sysDimItemRaw;
|
||||
sysDimItem = {};
|
||||
} else {
|
||||
sysDimItem = sysDimItemRaw;
|
||||
coordDim = sysDimItem.name;
|
||||
var ordinalMeta = sysDimItem.ordinalMeta;
|
||||
sysDimItem.ordinalMeta = null;
|
||||
sysDimItem = extend({}, sysDimItem);
|
||||
sysDimItem.ordinalMeta = ordinalMeta;
|
||||
// `coordDimIndex` should not be set directly.
|
||||
sysDimItemDimsDef = sysDimItem.dimsDef;
|
||||
sysDimItemOtherDims = sysDimItem.otherDims;
|
||||
sysDimItem.name = sysDimItem.coordDim = sysDimItem.coordDimIndex = sysDimItem.dimsDef = sysDimItem.otherDims = null;
|
||||
}
|
||||
var dataDims = encodeDefMap.get(coordDim);
|
||||
// negative resultDimIdx means no need to mapping.
|
||||
if (dataDims === false) {
|
||||
return;
|
||||
}
|
||||
dataDims = normalizeToArray(dataDims);
|
||||
// dimensions provides default dim sequences.
|
||||
if (!dataDims.length) {
|
||||
for (var i = 0; i < (sysDimItemDimsDef && sysDimItemDimsDef.length || 1); i++) {
|
||||
while (availDimIdx < dimCount && getResultItem(availDimIdx).coordDim != null) {
|
||||
availDimIdx++;
|
||||
}
|
||||
availDimIdx < dimCount && dataDims.push(availDimIdx++);
|
||||
}
|
||||
}
|
||||
// Apply templates.
|
||||
each(dataDims, function (resultDimIdx, coordDimIndex) {
|
||||
var resultItem = getResultItem(resultDimIdx);
|
||||
// Coordinate system has a higher priority on dim type than source.
|
||||
if (isUsingSourceDimensionsDef && sysDimItem.type != null) {
|
||||
resultItem.type = sysDimItem.type;
|
||||
}
|
||||
applyDim(defaults(resultItem, sysDimItem), coordDim, coordDimIndex);
|
||||
if (resultItem.name == null && sysDimItemDimsDef) {
|
||||
var sysDimItemDimsDefItem = sysDimItemDimsDef[coordDimIndex];
|
||||
!isObject(sysDimItemDimsDefItem) && (sysDimItemDimsDefItem = {
|
||||
name: sysDimItemDimsDefItem
|
||||
});
|
||||
resultItem.name = resultItem.displayName = sysDimItemDimsDefItem.name;
|
||||
resultItem.defaultTooltip = sysDimItemDimsDefItem.defaultTooltip;
|
||||
}
|
||||
// FIXME refactor, currently only used in case: {otherDims: {tooltip: false}}
|
||||
sysDimItemOtherDims && defaults(resultItem.otherDims, sysDimItemOtherDims);
|
||||
});
|
||||
});
|
||||
function applyDim(resultItem, coordDim, coordDimIndex) {
|
||||
if (VISUAL_DIMENSIONS.get(coordDim) != null) {
|
||||
resultItem.otherDims[coordDim] = coordDimIndex;
|
||||
} else {
|
||||
resultItem.coordDim = coordDim;
|
||||
resultItem.coordDimIndex = coordDimIndex;
|
||||
coordDimNameMap.set(coordDim, true);
|
||||
}
|
||||
}
|
||||
// Make sure the first extra dim is 'value'.
|
||||
var generateCoord = opt.generateCoord;
|
||||
var generateCoordCount = opt.generateCoordCount;
|
||||
var fromZero = generateCoordCount != null;
|
||||
generateCoordCount = generateCoord ? generateCoordCount || 1 : 0;
|
||||
var extra = generateCoord || 'value';
|
||||
function ifNoNameFillWithCoordName(resultItem) {
|
||||
if (resultItem.name == null) {
|
||||
// Duplication will be removed in the next step.
|
||||
resultItem.name = resultItem.coordDim;
|
||||
}
|
||||
}
|
||||
// Set dim `name` and other `coordDim` and other props.
|
||||
if (!omitUnusedDimensions) {
|
||||
for (var resultDimIdx = 0; resultDimIdx < dimCount; resultDimIdx++) {
|
||||
var resultItem = getResultItem(resultDimIdx);
|
||||
var coordDim = resultItem.coordDim;
|
||||
if (coordDim == null) {
|
||||
// TODO no need to generate coordDim for isExtraCoord?
|
||||
resultItem.coordDim = genCoordDimName(extra, coordDimNameMap, fromZero);
|
||||
resultItem.coordDimIndex = 0;
|
||||
// Series specified generateCoord is using out.
|
||||
if (!generateCoord || generateCoordCount <= 0) {
|
||||
resultItem.isExtraCoord = true;
|
||||
}
|
||||
generateCoordCount--;
|
||||
}
|
||||
ifNoNameFillWithCoordName(resultItem);
|
||||
if (resultItem.type == null && (guessOrdinal(source, resultDimIdx) === BE_ORDINAL.Must
|
||||
// Consider the case:
|
||||
// {
|
||||
// dataset: {source: [
|
||||
// ['2001', 123],
|
||||
// ['2002', 456],
|
||||
// ...
|
||||
// ['The others', 987],
|
||||
// ]},
|
||||
// series: {type: 'pie'}
|
||||
// }
|
||||
// The first column should better be treated as a "ordinal" although it
|
||||
// might not be detected as an "ordinal" by `guessOrdinal`.
|
||||
|| resultItem.isExtraCoord && (resultItem.otherDims.itemName != null || resultItem.otherDims.seriesName != null))) {
|
||||
resultItem.type = 'ordinal';
|
||||
}
|
||||
}
|
||||
} else {
|
||||
each(resultList, function (resultItem) {
|
||||
// PENDING: guessOrdinal or let user specify type: 'ordinal' manually?
|
||||
ifNoNameFillWithCoordName(resultItem);
|
||||
});
|
||||
// Sort dimensions: there are some rule that use the last dim as label,
|
||||
// and for some latter travel process easier.
|
||||
resultList.sort(function (item0, item1) {
|
||||
return item0.storeDimIndex - item1.storeDimIndex;
|
||||
});
|
||||
}
|
||||
removeDuplication(resultList);
|
||||
return new SeriesDataSchema({
|
||||
source: source,
|
||||
dimensions: resultList,
|
||||
fullDimensionCount: dimCount,
|
||||
dimensionOmitted: omitUnusedDimensions
|
||||
});
|
||||
}
|
||||
function removeDuplication(result) {
|
||||
var duplicationMap = createHashMap();
|
||||
for (var i = 0; i < result.length; i++) {
|
||||
var dim = result[i];
|
||||
var dimOriginalName = dim.name;
|
||||
var count = duplicationMap.get(dimOriginalName) || 0;
|
||||
if (count > 0) {
|
||||
// Starts from 0.
|
||||
dim.name = dimOriginalName + (count - 1);
|
||||
}
|
||||
count++;
|
||||
duplicationMap.set(dimOriginalName, count);
|
||||
}
|
||||
}
|
||||
// ??? TODO
|
||||
// Originally detect dimCount by data[0]. Should we
|
||||
// optimize it to only by sysDims and dimensions and encode.
|
||||
// So only necessary dims will be initialized.
|
||||
// But
|
||||
// (1) custom series should be considered. where other dims
|
||||
// may be visited.
|
||||
// (2) sometimes user need to calculate bubble size or use visualMap
|
||||
// on other dimensions besides coordSys needed.
|
||||
// So, dims that is not used by system, should be shared in data store?
|
||||
function getDimCount(source, sysDims, dimsDef, optDimCount) {
|
||||
// Note that the result dimCount should not small than columns count
|
||||
// of data, otherwise `dataDimNameMap` checking will be incorrect.
|
||||
var dimCount = Math.max(source.dimensionsDetectedCount || 1, sysDims.length, dimsDef.length, optDimCount || 0);
|
||||
each(sysDims, function (sysDimItem) {
|
||||
var sysDimItemDimsDef;
|
||||
if (isObject(sysDimItem) && (sysDimItemDimsDef = sysDimItem.dimsDef)) {
|
||||
dimCount = Math.max(dimCount, sysDimItemDimsDef.length);
|
||||
}
|
||||
});
|
||||
return dimCount;
|
||||
}
|
||||
function genCoordDimName(name, map, fromZero) {
|
||||
if (fromZero || map.hasKey(name)) {
|
||||
var i = 0;
|
||||
while (map.hasKey(name + i)) {
|
||||
i++;
|
||||
}
|
||||
name += i;
|
||||
}
|
||||
map.set(name, true);
|
||||
return name;
|
||||
}
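createDimensions above is kept as a compatibility entry for outside callers such as echarts-gl. A minimal usage sketch, assuming plain array data and two coordinate dimensions; the sample data and option values are illustrative only:

import { createDimensions } from 'echarts/lib/data/helper/createDimensions.js';

var dims = createDimensions(
  // Raw series data; prepareSeriesDataSchema wraps it into a Source when it is not one already.
  [['Mon', 120], ['Tue', 200], ['Wed', 150]],
  { coordDimensions: ['x', 'y'] }
);
// Returns SeriesDimensionDefine items sorted by storeDimIndex,
// e.g. dims[0].coordDim === 'x' and dims[1].coordDim === 'y'.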
frontend/node_modules/echarts/lib/data/helper/dataProvider.js (generated, vendored, new file, 343 lines)
@@ -0,0 +1,343 @@
/**
 * AUTO-GENERATED FILE. DO NOT MODIFY.
 */
var _a, _b, _c;
|
||||
// TODO
|
||||
// ??? refactor? check the outer usage of data provider.
|
||||
// merge with defaultDimValueGetter?
|
||||
import { isTypedArray, extend, assert, each, isObject, bind } from 'zrender/lib/core/util.js';
|
||||
import { getDataItemValue } from '../../util/model.js';
|
||||
import { createSourceFromSeriesDataOption, isSourceInstance } from '../Source.js';
|
||||
import { SOURCE_FORMAT_ORIGINAL, SOURCE_FORMAT_OBJECT_ROWS, SOURCE_FORMAT_KEYED_COLUMNS, SOURCE_FORMAT_TYPED_ARRAY, SOURCE_FORMAT_ARRAY_ROWS, SERIES_LAYOUT_BY_COLUMN, SERIES_LAYOUT_BY_ROW } from '../../util/types.js';
|
||||
var providerMethods;
|
||||
var mountMethods;
|
||||
/**
|
||||
* If normal array used, mutable chunk size is supported.
|
||||
* If typed array used, chunk size must be fixed.
|
||||
*/
|
||||
var DefaultDataProvider = /** @class */function () {
|
||||
function DefaultDataProvider(sourceParam, dimSize) {
|
||||
// let source: Source;
|
||||
var source = !isSourceInstance(sourceParam) ? createSourceFromSeriesDataOption(sourceParam) : sourceParam;
|
||||
// declare source is Source;
|
||||
this._source = source;
|
||||
var data = this._data = source.data;
|
||||
// Typed array. TODO IE10+?
|
||||
if (source.sourceFormat === SOURCE_FORMAT_TYPED_ARRAY) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
if (dimSize == null) {
|
||||
throw new Error('Typed array data must specify dimension size');
|
||||
}
|
||||
}
|
||||
this._offset = 0;
|
||||
this._dimSize = dimSize;
|
||||
this._data = data;
|
||||
}
|
||||
mountMethods(this, data, source);
|
||||
}
|
||||
DefaultDataProvider.prototype.getSource = function () {
|
||||
return this._source;
|
||||
};
|
||||
DefaultDataProvider.prototype.count = function () {
|
||||
return 0;
|
||||
};
|
||||
DefaultDataProvider.prototype.getItem = function (idx, out) {
|
||||
return;
|
||||
};
|
||||
DefaultDataProvider.prototype.appendData = function (newData) {};
|
||||
DefaultDataProvider.prototype.clean = function () {};
|
||||
DefaultDataProvider.protoInitialize = function () {
|
||||
// PENDING: To avoid potential incompat (e.g., prototype
|
||||
// is visited somewhere), still init them on prototype.
|
||||
var proto = DefaultDataProvider.prototype;
|
||||
proto.pure = false;
|
||||
proto.persistent = true;
|
||||
}();
|
||||
DefaultDataProvider.internalField = function () {
|
||||
var _a;
|
||||
mountMethods = function (provider, data, source) {
|
||||
var sourceFormat = source.sourceFormat;
|
||||
var seriesLayoutBy = source.seriesLayoutBy;
|
||||
var startIndex = source.startIndex;
|
||||
var dimsDef = source.dimensionsDefine;
|
||||
var methods = providerMethods[getMethodMapKey(sourceFormat, seriesLayoutBy)];
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(methods, 'Invalide sourceFormat: ' + sourceFormat);
|
||||
}
|
||||
extend(provider, methods);
|
||||
if (sourceFormat === SOURCE_FORMAT_TYPED_ARRAY) {
|
||||
provider.getItem = getItemForTypedArray;
|
||||
provider.count = countForTypedArray;
|
||||
provider.fillStorage = fillStorageForTypedArray;
|
||||
} else {
|
||||
var rawItemGetter = getRawSourceItemGetter(sourceFormat, seriesLayoutBy);
|
||||
provider.getItem = bind(rawItemGetter, null, data, startIndex, dimsDef);
|
||||
var rawCounter = getRawSourceDataCounter(sourceFormat, seriesLayoutBy);
|
||||
provider.count = bind(rawCounter, null, data, startIndex, dimsDef);
|
||||
}
|
||||
};
|
||||
var getItemForTypedArray = function (idx, out) {
|
||||
idx = idx - this._offset;
|
||||
out = out || [];
|
||||
var data = this._data;
|
||||
var dimSize = this._dimSize;
|
||||
var offset = dimSize * idx;
|
||||
for (var i = 0; i < dimSize; i++) {
|
||||
out[i] = data[offset + i];
|
||||
}
|
||||
return out;
|
||||
};
|
||||
var fillStorageForTypedArray = function (start, end, storage, extent) {
|
||||
var data = this._data;
|
||||
var dimSize = this._dimSize;
|
||||
for (var dim = 0; dim < dimSize; dim++) {
|
||||
var dimExtent = extent[dim];
|
||||
var min = dimExtent[0] == null ? Infinity : dimExtent[0];
|
||||
var max = dimExtent[1] == null ? -Infinity : dimExtent[1];
|
||||
var count = end - start;
|
||||
var arr = storage[dim];
|
||||
for (var i = 0; i < count; i++) {
|
||||
// appendData with TypedArray will always do replace in provider.
|
||||
var val = data[i * dimSize + dim];
|
||||
arr[start + i] = val;
|
||||
val < min && (min = val);
|
||||
val > max && (max = val);
|
||||
}
|
||||
dimExtent[0] = min;
|
||||
dimExtent[1] = max;
|
||||
}
|
||||
};
|
||||
var countForTypedArray = function () {
|
||||
return this._data ? this._data.length / this._dimSize : 0;
|
||||
};
|
||||
providerMethods = (_a = {}, _a[SOURCE_FORMAT_ARRAY_ROWS + '_' + SERIES_LAYOUT_BY_COLUMN] = {
|
||||
pure: true,
|
||||
appendData: appendDataSimply
|
||||
}, _a[SOURCE_FORMAT_ARRAY_ROWS + '_' + SERIES_LAYOUT_BY_ROW] = {
|
||||
pure: true,
|
||||
appendData: function () {
|
||||
throw new Error('Do not support appendData when set seriesLayoutBy: "row".');
|
||||
}
|
||||
}, _a[SOURCE_FORMAT_OBJECT_ROWS] = {
|
||||
pure: true,
|
||||
appendData: appendDataSimply
|
||||
}, _a[SOURCE_FORMAT_KEYED_COLUMNS] = {
|
||||
pure: true,
|
||||
appendData: function (newData) {
|
||||
var data = this._data;
|
||||
each(newData, function (newCol, key) {
|
||||
var oldCol = data[key] || (data[key] = []);
|
||||
for (var i = 0; i < (newCol || []).length; i++) {
|
||||
oldCol.push(newCol[i]);
|
||||
}
|
||||
});
|
||||
}
|
||||
}, _a[SOURCE_FORMAT_ORIGINAL] = {
|
||||
appendData: appendDataSimply
|
||||
}, _a[SOURCE_FORMAT_TYPED_ARRAY] = {
|
||||
persistent: false,
|
||||
pure: true,
|
||||
appendData: function (newData) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(isTypedArray(newData), 'Added data must be TypedArray if data in initialization is TypedArray');
|
||||
}
|
||||
this._data = newData;
|
||||
},
|
||||
// Clean self if data is already used.
|
||||
clean: function () {
|
||||
// PENDING
|
||||
this._offset += this.count();
|
||||
this._data = null;
|
||||
}
|
||||
}, _a);
|
||||
function appendDataSimply(newData) {
|
||||
for (var i = 0; i < newData.length; i++) {
|
||||
this._data.push(newData[i]);
|
||||
}
|
||||
}
|
||||
}();
|
||||
return DefaultDataProvider;
|
||||
}();
|
||||
export { DefaultDataProvider };
|
||||
var getItemSimply = function (rawData, startIndex, dimsDef, idx) {
|
||||
return rawData[idx];
|
||||
};
|
||||
var rawSourceItemGetterMap = (_a = {}, _a[SOURCE_FORMAT_ARRAY_ROWS + '_' + SERIES_LAYOUT_BY_COLUMN] = function (rawData, startIndex, dimsDef, idx) {
|
||||
return rawData[idx + startIndex];
|
||||
}, _a[SOURCE_FORMAT_ARRAY_ROWS + '_' + SERIES_LAYOUT_BY_ROW] = function (rawData, startIndex, dimsDef, idx, out) {
|
||||
idx += startIndex;
|
||||
var item = out || [];
|
||||
var data = rawData;
|
||||
for (var i = 0; i < data.length; i++) {
|
||||
var row = data[i];
|
||||
item[i] = row ? row[idx] : null;
|
||||
}
|
||||
return item;
|
||||
}, _a[SOURCE_FORMAT_OBJECT_ROWS] = getItemSimply, _a[SOURCE_FORMAT_KEYED_COLUMNS] = function (rawData, startIndex, dimsDef, idx, out) {
|
||||
var item = out || [];
|
||||
for (var i = 0; i < dimsDef.length; i++) {
|
||||
var dimName = dimsDef[i].name;
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
if (dimName == null) {
|
||||
throw new Error();
|
||||
}
|
||||
}
|
||||
var col = rawData[dimName];
|
||||
item[i] = col ? col[idx] : null;
|
||||
}
|
||||
return item;
|
||||
}, _a[SOURCE_FORMAT_ORIGINAL] = getItemSimply, _a);
|
||||
export function getRawSourceItemGetter(sourceFormat, seriesLayoutBy) {
|
||||
var method = rawSourceItemGetterMap[getMethodMapKey(sourceFormat, seriesLayoutBy)];
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(method, 'Do not support get item on "' + sourceFormat + '", "' + seriesLayoutBy + '".');
|
||||
}
|
||||
return method;
|
||||
}
|
||||
var countSimply = function (rawData, startIndex, dimsDef) {
|
||||
return rawData.length;
|
||||
};
|
||||
var rawSourceDataCounterMap = (_b = {}, _b[SOURCE_FORMAT_ARRAY_ROWS + '_' + SERIES_LAYOUT_BY_COLUMN] = function (rawData, startIndex, dimsDef) {
|
||||
return Math.max(0, rawData.length - startIndex);
|
||||
}, _b[SOURCE_FORMAT_ARRAY_ROWS + '_' + SERIES_LAYOUT_BY_ROW] = function (rawData, startIndex, dimsDef) {
|
||||
var row = rawData[0];
|
||||
return row ? Math.max(0, row.length - startIndex) : 0;
|
||||
}, _b[SOURCE_FORMAT_OBJECT_ROWS] = countSimply, _b[SOURCE_FORMAT_KEYED_COLUMNS] = function (rawData, startIndex, dimsDef) {
|
||||
var dimName = dimsDef[0].name;
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
if (dimName == null) {
|
||||
throw new Error();
|
||||
}
|
||||
}
|
||||
var col = rawData[dimName];
|
||||
return col ? col.length : 0;
|
||||
}, _b[SOURCE_FORMAT_ORIGINAL] = countSimply, _b);
|
||||
export function getRawSourceDataCounter(sourceFormat, seriesLayoutBy) {
|
||||
var method = rawSourceDataCounterMap[getMethodMapKey(sourceFormat, seriesLayoutBy)];
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(method, 'Do not support count on "' + sourceFormat + '", "' + seriesLayoutBy + '".');
|
||||
}
|
||||
return method;
|
||||
}
|
||||
var getRawValueSimply = function (dataItem, dimIndex, property) {
|
||||
return dataItem[dimIndex];
|
||||
};
|
||||
var rawSourceValueGetterMap = (_c = {}, _c[SOURCE_FORMAT_ARRAY_ROWS] = getRawValueSimply, _c[SOURCE_FORMAT_OBJECT_ROWS] = function (dataItem, dimIndex, property) {
|
||||
return dataItem[property];
|
||||
}, _c[SOURCE_FORMAT_KEYED_COLUMNS] = getRawValueSimply, _c[SOURCE_FORMAT_ORIGINAL] = function (dataItem, dimIndex, property) {
|
||||
// FIXME: In some case (markpoint in geo (geo-map.html)),
|
||||
// dataItem is {coord: [...]}
|
||||
var value = getDataItemValue(dataItem);
|
||||
return !(value instanceof Array) ? value : value[dimIndex];
|
||||
}, _c[SOURCE_FORMAT_TYPED_ARRAY] = getRawValueSimply, _c);
|
||||
export function getRawSourceValueGetter(sourceFormat) {
|
||||
var method = rawSourceValueGetterMap[sourceFormat];
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(method, 'Do not support get value on "' + sourceFormat + '".');
|
||||
}
|
||||
return method;
|
||||
}
|
||||
function getMethodMapKey(sourceFormat, seriesLayoutBy) {
|
||||
return sourceFormat === SOURCE_FORMAT_ARRAY_ROWS ? sourceFormat + '_' + seriesLayoutBy : sourceFormat;
|
||||
}
|
||||
// ??? FIXME can these logic be more neat: getRawValue, getRawDataItem,
|
||||
// Consider persistent.
|
||||
// Caution: why use raw value to display on label or tooltip?
|
||||
// A reason is to avoid format. For example time value we do not know
|
||||
// how to format is expected. More over, if stack is used, calculated
|
||||
// value may be 0.91000000001, which have brings trouble to display.
|
||||
// TODO: consider how to treat null/undefined/NaN when display?
|
||||
export function retrieveRawValue(data, dataIndex,
|
||||
// If dimIndex is null/undefined, return OptionDataItem.
|
||||
// Otherwise, return OptionDataValue.
|
||||
dim) {
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
// Consider data may be not persistent.
|
||||
var dataItem = data.getRawDataItem(dataIndex);
|
||||
if (dataItem == null) {
|
||||
return;
|
||||
}
|
||||
var store = data.getStore();
|
||||
var sourceFormat = store.getSource().sourceFormat;
|
||||
if (dim != null) {
|
||||
var dimIndex = data.getDimensionIndex(dim);
|
||||
var property = store.getDimensionProperty(dimIndex);
|
||||
return getRawSourceValueGetter(sourceFormat)(dataItem, dimIndex, property);
|
||||
} else {
|
||||
var result = dataItem;
|
||||
if (sourceFormat === SOURCE_FORMAT_ORIGINAL) {
|
||||
result = getDataItemValue(dataItem);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Compatible with some cases (in pie, map) like:
|
||||
* data: [{name: 'xx', value: 5, selected: true}, ...]
|
||||
* where only sourceFormat is 'original' and 'objectRows' supported.
|
||||
*
|
||||
* // TODO
|
||||
* Supported detail options in data item when using 'arrayRows'.
|
||||
*
|
||||
* @param data
|
||||
* @param dataIndex
|
||||
* @param attr like 'selected'
|
||||
*/
|
||||
export function retrieveRawAttr(data, dataIndex, attr) {
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
var sourceFormat = data.getStore().getSource().sourceFormat;
|
||||
if (sourceFormat !== SOURCE_FORMAT_ORIGINAL && sourceFormat !== SOURCE_FORMAT_OBJECT_ROWS) {
|
||||
return;
|
||||
}
|
||||
var dataItem = data.getRawDataItem(dataIndex);
|
||||
if (sourceFormat === SOURCE_FORMAT_ORIGINAL && !isObject(dataItem)) {
|
||||
dataItem = null;
|
||||
}
|
||||
if (dataItem) {
|
||||
return dataItem[attr];
|
||||
}
|
||||
}
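A minimal sketch of how retrieveRawValue above is typically consumed, e.g. from a tooltip formatter; seriesData being a SeriesData instance and 'y' being an existing dimension name are assumptions, not part of this file:

import { retrieveRawValue } from 'echarts/lib/data/helper/dataProvider.js';

function formatRawY(seriesData, dataIndex) {
  // Returns the value exactly as it appears in the raw source (no parsing or stacking),
  // or undefined when the data item is missing.
  return retrieveRawValue(seriesData, dataIndex, 'y');
}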
frontend/node_modules/echarts/lib/data/helper/dataStackHelper.js (generated, vendored, new file, 179 lines)
@@ -0,0 +1,179 @@
/**
 * AUTO-GENERATED FILE. DO NOT MODIFY.
 */
import { each, isString } from 'zrender/lib/core/util.js';
import { isSeriesDataSchema } from './SeriesDataSchema.js';
/**
 * Note that it is too complicated to support 3d stack by value
 * (have to create two-dimension inverted index), so in 3d case
 * we just support that stacked by index.
 *
 * @param seriesModel
 * @param dimensionsInput The same as the input of <module:echarts/data/SeriesData>.
 *        The input will be modified.
 * @param opt
 * @param opt.stackedCoordDimension Specify a coord dimension if needed.
 * @param opt.byIndex=false
 * @return calculationInfo
 * {
 *     stackedDimension: string
 *     stackedByDimension: string
 *     isStackedByIndex: boolean
 *     stackedOverDimension: string
 *     stackResultDimension: string
 * }
 */
export function enableDataStack(seriesModel, dimensionsInput, opt) {
  opt = opt || {};
  var byIndex = opt.byIndex;
  var stackedCoordDimension = opt.stackedCoordDimension;
  var dimensionDefineList;
  var schema;
  var store;
  if (isLegacyDimensionsInput(dimensionsInput)) {
    dimensionDefineList = dimensionsInput;
  } else {
    schema = dimensionsInput.schema;
    dimensionDefineList = schema.dimensions;
    store = dimensionsInput.store;
  }
  // Compatibal: when `stack` is set as '', do not stack.
  var mayStack = !!(seriesModel && seriesModel.get('stack'));
  var stackedByDimInfo;
  var stackedDimInfo;
  var stackResultDimension;
  var stackedOverDimension;
  each(dimensionDefineList, function (dimensionInfo, index) {
    if (isString(dimensionInfo)) {
      dimensionDefineList[index] = dimensionInfo = {
        name: dimensionInfo
      };
    }
    if (mayStack && !dimensionInfo.isExtraCoord) {
      // Find the first ordinal dimension as the stackedByDimInfo.
      if (!byIndex && !stackedByDimInfo && dimensionInfo.ordinalMeta) {
        stackedByDimInfo = dimensionInfo;
      }
      // Find the first stackable dimension as the stackedDimInfo.
      if (!stackedDimInfo && dimensionInfo.type !== 'ordinal' && dimensionInfo.type !== 'time' && (!stackedCoordDimension || stackedCoordDimension === dimensionInfo.coordDim)) {
        stackedDimInfo = dimensionInfo;
      }
    }
  });
  if (stackedDimInfo && !byIndex && !stackedByDimInfo) {
    // Compatible with previous design, value axis (time axis) only stack by index.
    // It may make sense if the user provides elaborately constructed data.
    byIndex = true;
  }
  // Add stack dimension, they can be both calculated by coordinate system in `unionExtent`.
  // That put stack logic in List is for using conveniently in echarts extensions, but it
  // might not be a good way.
  if (stackedDimInfo) {
    // Use a weird name that not duplicated with other names.
    // Also need to use seriesModel.id as postfix because different
    // series may share same data store. The stack dimension needs to be distinguished.
    stackResultDimension = '__\0ecstackresult_' + seriesModel.id;
    stackedOverDimension = '__\0ecstackedover_' + seriesModel.id;
    // Create inverted index to fast query index by value.
    if (stackedByDimInfo) {
      stackedByDimInfo.createInvertedIndices = true;
    }
    var stackedDimCoordDim_1 = stackedDimInfo.coordDim;
    var stackedDimType = stackedDimInfo.type;
    var stackedDimCoordIndex_1 = 0;
    each(dimensionDefineList, function (dimensionInfo) {
      if (dimensionInfo.coordDim === stackedDimCoordDim_1) {
        stackedDimCoordIndex_1++;
      }
    });
    var stackedOverDimensionDefine = {
      name: stackResultDimension,
      coordDim: stackedDimCoordDim_1,
      coordDimIndex: stackedDimCoordIndex_1,
      type: stackedDimType,
      isExtraCoord: true,
      isCalculationCoord: true,
      storeDimIndex: dimensionDefineList.length
    };
    var stackResultDimensionDefine = {
      name: stackedOverDimension,
      // This dimension contains stack base (generally, 0), so do not set it as
      // `stackedDimCoordDim` to avoid extent calculation, consider log scale.
      coordDim: stackedOverDimension,
      coordDimIndex: stackedDimCoordIndex_1 + 1,
      type: stackedDimType,
      isExtraCoord: true,
      isCalculationCoord: true,
      storeDimIndex: dimensionDefineList.length + 1
    };
    if (schema) {
      if (store) {
        stackedOverDimensionDefine.storeDimIndex = store.ensureCalculationDimension(stackedOverDimension, stackedDimType);
        stackResultDimensionDefine.storeDimIndex = store.ensureCalculationDimension(stackResultDimension, stackedDimType);
      }
      schema.appendCalculationDimension(stackedOverDimensionDefine);
      schema.appendCalculationDimension(stackResultDimensionDefine);
    } else {
      dimensionDefineList.push(stackedOverDimensionDefine);
      dimensionDefineList.push(stackResultDimensionDefine);
    }
  }
  return {
    stackedDimension: stackedDimInfo && stackedDimInfo.name,
    stackedByDimension: stackedByDimInfo && stackedByDimInfo.name,
    isStackedByIndex: byIndex,
    stackedOverDimension: stackedOverDimension,
    stackResultDimension: stackResultDimension
  };
}
function isLegacyDimensionsInput(dimensionsInput) {
  return !isSeriesDataSchema(dimensionsInput.schema);
}
export function isDimensionStacked(data, stackedDim) {
  // Each single series only maps to one pair of axis. So we do not need to
  // check stackByDim, whatever stacked by a dimension or stacked by index.
  return !!stackedDim && stackedDim === data.getCalculationInfo('stackedDimension');
}
export function getStackedDimension(data, targetDim) {
  return isDimensionStacked(data, targetDim) ? data.getCalculationInfo('stackResultDimension') : targetDim;
}
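A minimal sketch of wiring up the calculationInfo returned by enableDataStack above; seriesModel, schema and store are assumed to be supplied by the series initialization code, and setupStack is an illustrative helper name:

import { enableDataStack } from 'echarts/lib/data/helper/dataStackHelper.js';

function setupStack(seriesModel, schema, store) {
  var info = enableDataStack(seriesModel, { schema: schema, store: store });
  // A SeriesData would normally record this info so that later layout code can map
  // a logical dimension (e.g. 'y') to its stacked result dimension via getStackedDimension.
  return info; // { stackedDimension, stackedByDimension, isStackedByIndex, stackedOverDimension, stackResultDimension }
}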
frontend/node_modules/echarts/lib/data/helper/dataValueHelper.js (generated, vendored, new file, 238 lines)
@@ -0,0 +1,238 @@
/**
 * AUTO-GENERATED FILE. DO NOT MODIFY.
 */
import { parseDate, numericToNumber } from '../../util/number.js';
|
||||
import { createHashMap, trim, hasOwn, isString, isNumber } from 'zrender/lib/core/util.js';
|
||||
import { throwError } from '../../util/log.js';
|
||||
/**
|
||||
* Convert raw the value in to inner value in List.
|
||||
*
|
||||
* [Performance sensitive]
|
||||
*
|
||||
* [Caution]: this is the key logic of user value parser.
|
||||
* For backward compatibility, do not modify it until you have to!
|
||||
*/
|
||||
export function parseDataValue(value,
|
||||
// For high performance, do not omit the second param.
|
||||
opt) {
|
||||
// Performance sensitive.
|
||||
var dimType = opt && opt.type;
|
||||
if (dimType === 'ordinal') {
|
||||
// If given value is a category string
|
||||
return value;
|
||||
}
|
||||
if (dimType === 'time'
|
||||
// spead up when using timestamp
|
||||
&& !isNumber(value) && value != null && value !== '-') {
|
||||
value = +parseDate(value);
|
||||
}
|
||||
// dimType defaults 'number'.
|
||||
// If dimType is not ordinal and value is null or undefined or NaN or '-',
|
||||
// parse to NaN.
|
||||
// number-like string (like ' 123 ') can be converted to a number.
|
||||
// where null/undefined or other string will be converted to NaN.
|
||||
return value == null || value === '' ? NaN
|
||||
// If string (like '-'), using '+' parse to NaN
|
||||
// If object, also parse to NaN
|
||||
: Number(value);
|
||||
}
|
||||
;
|
||||
var valueParserMap = createHashMap({
|
||||
'number': function (val) {
|
||||
// Do not use `numericToNumber` here. We have `numericToNumber` by default.
|
||||
// Here the number parser can have loose rule:
|
||||
// enable to cut suffix: "120px" => 120, "14%" => 14.
|
||||
return parseFloat(val);
|
||||
},
|
||||
'time': function (val) {
|
||||
// return timestamp.
|
||||
return +parseDate(val);
|
||||
},
|
||||
'trim': function (val) {
|
||||
return isString(val) ? trim(val) : val;
|
||||
}
|
||||
});
|
||||
export function getRawValueParser(type) {
|
||||
return valueParserMap.get(type);
|
||||
}
|
||||
var ORDER_COMPARISON_OP_MAP = {
|
||||
lt: function (lval, rval) {
|
||||
return lval < rval;
|
||||
},
|
||||
lte: function (lval, rval) {
|
||||
return lval <= rval;
|
||||
},
|
||||
gt: function (lval, rval) {
|
||||
return lval > rval;
|
||||
},
|
||||
gte: function (lval, rval) {
|
||||
return lval >= rval;
|
||||
}
|
||||
};
|
||||
var FilterOrderComparator = /** @class */function () {
|
||||
function FilterOrderComparator(op, rval) {
|
||||
if (!isNumber(rval)) {
|
||||
var errMsg = '';
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
errMsg = 'rvalue of "<", ">", "<=", ">=" can only be number in filter.';
|
||||
}
|
||||
throwError(errMsg);
|
||||
}
|
||||
this._opFn = ORDER_COMPARISON_OP_MAP[op];
|
||||
this._rvalFloat = numericToNumber(rval);
|
||||
}
|
||||
// Performance sensitive.
|
||||
FilterOrderComparator.prototype.evaluate = function (lval) {
|
||||
// Most cases is 'number', and typeof maybe 10 times faseter than parseFloat.
|
||||
return isNumber(lval) ? this._opFn(lval, this._rvalFloat) : this._opFn(numericToNumber(lval), this._rvalFloat);
|
||||
};
|
||||
return FilterOrderComparator;
|
||||
}();
|
||||
var SortOrderComparator = /** @class */function () {
|
||||
/**
|
||||
* @param order by default: 'asc'
|
||||
* @param incomparable by default: Always on the tail.
|
||||
* That is, if 'asc' => 'max', if 'desc' => 'min'
|
||||
* See the definition of "incomparable" in [SORT_COMPARISON_RULE].
|
||||
*/
|
||||
function SortOrderComparator(order, incomparable) {
|
||||
var isDesc = order === 'desc';
|
||||
this._resultLT = isDesc ? 1 : -1;
|
||||
if (incomparable == null) {
|
||||
incomparable = isDesc ? 'min' : 'max';
|
||||
}
|
||||
this._incomparable = incomparable === 'min' ? -Infinity : Infinity;
|
||||
}
|
||||
// See [SORT_COMPARISON_RULE].
|
||||
// Performance sensitive.
|
||||
SortOrderComparator.prototype.evaluate = function (lval, rval) {
|
||||
// Most cases is 'number', and typeof maybe 10 times faseter than parseFloat.
|
||||
var lvalFloat = isNumber(lval) ? lval : numericToNumber(lval);
|
||||
var rvalFloat = isNumber(rval) ? rval : numericToNumber(rval);
|
||||
var lvalNotNumeric = isNaN(lvalFloat);
|
||||
var rvalNotNumeric = isNaN(rvalFloat);
|
||||
if (lvalNotNumeric) {
|
||||
lvalFloat = this._incomparable;
|
||||
}
|
||||
if (rvalNotNumeric) {
|
||||
rvalFloat = this._incomparable;
|
||||
}
|
||||
if (lvalNotNumeric && rvalNotNumeric) {
|
||||
var lvalIsStr = isString(lval);
|
||||
var rvalIsStr = isString(rval);
|
||||
if (lvalIsStr) {
|
||||
lvalFloat = rvalIsStr ? lval : 0;
|
||||
}
|
||||
if (rvalIsStr) {
|
||||
rvalFloat = lvalIsStr ? rval : 0;
|
||||
}
|
||||
}
|
||||
return lvalFloat < rvalFloat ? this._resultLT : lvalFloat > rvalFloat ? -this._resultLT : 0;
|
||||
};
|
||||
return SortOrderComparator;
|
||||
}();
|
||||
export { SortOrderComparator };
|
||||
var FilterEqualityComparator = /** @class */function () {
|
||||
function FilterEqualityComparator(isEq, rval) {
|
||||
this._rval = rval;
|
||||
this._isEQ = isEq;
|
||||
this._rvalTypeof = typeof rval;
|
||||
this._rvalFloat = numericToNumber(rval);
|
||||
}
|
||||
// Performance sensitive.
|
||||
FilterEqualityComparator.prototype.evaluate = function (lval) {
|
||||
var eqResult = lval === this._rval;
|
||||
if (!eqResult) {
|
||||
var lvalTypeof = typeof lval;
|
||||
if (lvalTypeof !== this._rvalTypeof && (lvalTypeof === 'number' || this._rvalTypeof === 'number')) {
|
||||
eqResult = numericToNumber(lval) === this._rvalFloat;
|
||||
}
|
||||
}
|
||||
return this._isEQ ? eqResult : !eqResult;
|
||||
};
|
||||
return FilterEqualityComparator;
|
||||
}();
|
||||
/**
|
||||
* [FILTER_COMPARISON_RULE]
|
||||
* `lt`|`lte`|`gt`|`gte`:
|
||||
* + rval must be a number. And lval will be converted to number (`numericToNumber`) to compare.
|
||||
* `eq`:
|
||||
* + If same type, compare with `===`.
|
||||
* + If there is one number, convert to number (`numericToNumber`) to compare.
|
||||
* + Else return `false`.
|
||||
* `ne`:
|
||||
* + Not `eq`.
|
||||
*
|
||||
*
|
||||
* [SORT_COMPARISON_RULE]
|
||||
* All the values are grouped into three categories:
|
||||
* + "numeric" (number and numeric string)
|
||||
* + "non-numeric-string" (string that excluding numeric string)
|
||||
* + "others"
|
||||
* "numeric" vs "numeric": values are ordered by number order.
|
||||
* "non-numeric-string" vs "non-numeric-string": values are ordered by ES spec (#sec-abstract-relational-comparison).
|
||||
* "others" vs "others": do not change order (always return 0).
|
||||
* "numeric" vs "non-numeric-string": "non-numeric-string" is treated as "incomparable".
|
||||
* "number" vs "others": "others" is treated as "incomparable".
|
||||
* "non-numeric-string" vs "others": "others" is treated as "incomparable".
|
||||
* "incomparable" will be seen as -Infinity or Infinity (depends on the settings).
|
||||
* MEMO:
|
||||
* Non-numeric string sort makes sense when we need to put the items with the same tag together.
|
||||
* But if we support string sort, we still need to avoid the misleading like `'2' > '12'`,
|
||||
* So we treat "numeric-string" sorted by number order rather than string comparison.
|
||||
*
|
||||
*
|
||||
* [CHECK_LIST_OF_THE_RULE_DESIGN]
|
||||
* + Do not support string comparison until required. And also need to
|
||||
* avoid the misleading of "2" > "12".
|
||||
* + Should avoid the misleading case:
|
||||
* `" 22 " gte "22"` is `true` but `" 22 " eq "22"` is `false`.
|
||||
 * + JS bad cases should be avoided: null <= 0, [] <= 0, ' ' <= 0, ...
|
||||
* + Only "numeric" can be converted to comparable number, otherwise converted to NaN.
|
||||
* See `util/number.ts#numericToNumber`.
|
||||
*
|
||||
* @return If `op` is not `RelationalOperator`, return null;
|
||||
*/
|
||||
export function createFilterComparator(op, rval) {
|
||||
return op === 'eq' || op === 'ne' ? new FilterEqualityComparator(op === 'eq', rval) : hasOwn(ORDER_COMPARISON_OP_MAP, op) ? new FilterOrderComparator(op, rval) : null;
|
||||
}
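// A minimal usage sketch of createFilterComparator (not part of the generated
// module; the literal values are illustrative assumptions):
function exampleFilterComparatorUsage() {
  var eq = createFilterComparator('eq', 2);
  var ne = createFilterComparator('ne', 2);
  // '2' and 2 differ in type, but one side is a number, so both sides are
  // converted via numericToNumber and compared as numbers.
  return eq.evaluate('2') === true && ne.evaluate('2') === false;
}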
|
172
frontend/node_modules/echarts/lib/data/helper/dimensionHelper.js
generated
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* AUTO-GENERATED FILE. DO NOT MODIFY.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { each, createHashMap, assert, map } from 'zrender/lib/core/util.js';
|
||||
import { VISUAL_DIMENSIONS } from '../../util/types.js';
|
||||
var DimensionUserOuput = /** @class */function () {
|
||||
function DimensionUserOuput(encode, dimRequest) {
|
||||
this._encode = encode;
|
||||
this._schema = dimRequest;
|
||||
}
|
||||
DimensionUserOuput.prototype.get = function () {
|
||||
return {
|
||||
      // Do not generate full dimension names until first used.
|
||||
fullDimensions: this._getFullDimensionNames(),
|
||||
encode: this._encode
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Get all data store dimension names.
|
||||
* Theoretically a series data store is defined both by series and used dataset (if any).
|
||||
* If some dimensions are omitted for performance reason in `this.dimensions`,
|
||||
* the dimension name may not be auto-generated if user does not specify a dimension name.
|
||||
* In this case, the dimension name is `null`/`undefined`.
|
||||
*/
|
||||
DimensionUserOuput.prototype._getFullDimensionNames = function () {
|
||||
if (!this._cachedDimNames) {
|
||||
this._cachedDimNames = this._schema ? this._schema.makeOutputDimensionNames() : [];
|
||||
}
|
||||
return this._cachedDimNames;
|
||||
};
|
||||
return DimensionUserOuput;
|
||||
}();
|
||||
;
|
||||
export function summarizeDimensions(data, schema) {
|
||||
var summary = {};
|
||||
var encode = summary.encode = {};
|
||||
var notExtraCoordDimMap = createHashMap();
|
||||
var defaultedLabel = [];
|
||||
var defaultedTooltip = [];
|
||||
var userOutputEncode = {};
|
||||
each(data.dimensions, function (dimName) {
|
||||
var dimItem = data.getDimensionInfo(dimName);
|
||||
var coordDim = dimItem.coordDim;
|
||||
if (coordDim) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(VISUAL_DIMENSIONS.get(coordDim) == null);
|
||||
}
|
||||
var coordDimIndex = dimItem.coordDimIndex;
|
||||
getOrCreateEncodeArr(encode, coordDim)[coordDimIndex] = dimName;
|
||||
if (!dimItem.isExtraCoord) {
|
||||
notExtraCoordDimMap.set(coordDim, 1);
|
||||
// Use the last coord dim (and label friendly) as default label,
|
||||
// because when dataset is used, it is hard to guess which dimension
|
||||
        // can be the value dimension. Showing both x and y on the label does not look good,
|
||||
// and conventionally y axis is focused more.
|
||||
if (mayLabelDimType(dimItem.type)) {
|
||||
defaultedLabel[0] = dimName;
|
||||
}
|
||||
        // User output encode does not contain generated coords.
|
||||
// And it only has index. User can use index to retrieve value from the raw item array.
|
||||
getOrCreateEncodeArr(userOutputEncode, coordDim)[coordDimIndex] = data.getDimensionIndex(dimItem.name);
|
||||
}
|
||||
if (dimItem.defaultTooltip) {
|
||||
defaultedTooltip.push(dimName);
|
||||
}
|
||||
}
|
||||
VISUAL_DIMENSIONS.each(function (v, otherDim) {
|
||||
var encodeArr = getOrCreateEncodeArr(encode, otherDim);
|
||||
var dimIndex = dimItem.otherDims[otherDim];
|
||||
if (dimIndex != null && dimIndex !== false) {
|
||||
encodeArr[dimIndex] = dimItem.name;
|
||||
}
|
||||
});
|
||||
});
|
||||
var dataDimsOnCoord = [];
|
||||
var encodeFirstDimNotExtra = {};
|
||||
notExtraCoordDimMap.each(function (v, coordDim) {
|
||||
var dimArr = encode[coordDim];
|
||||
encodeFirstDimNotExtra[coordDim] = dimArr[0];
|
||||
// Not necessary to remove duplicate, because a data
|
||||
    // dim cannot be on more than one coordDim.
|
||||
dataDimsOnCoord = dataDimsOnCoord.concat(dimArr);
|
||||
});
|
||||
summary.dataDimsOnCoord = dataDimsOnCoord;
|
||||
summary.dataDimIndicesOnCoord = map(dataDimsOnCoord, function (dimName) {
|
||||
return data.getDimensionInfo(dimName).storeDimIndex;
|
||||
});
|
||||
summary.encodeFirstDimNotExtra = encodeFirstDimNotExtra;
|
||||
var encodeLabel = encode.label;
|
||||
// FIXME `encode.label` is not recommended, because formatter cannot be set
|
||||
// in this way. Use label.formatter instead. Maybe remove this approach someday.
|
||||
if (encodeLabel && encodeLabel.length) {
|
||||
defaultedLabel = encodeLabel.slice();
|
||||
}
|
||||
var encodeTooltip = encode.tooltip;
|
||||
if (encodeTooltip && encodeTooltip.length) {
|
||||
defaultedTooltip = encodeTooltip.slice();
|
||||
} else if (!defaultedTooltip.length) {
|
||||
defaultedTooltip = defaultedLabel.slice();
|
||||
}
|
||||
encode.defaultedLabel = defaultedLabel;
|
||||
encode.defaultedTooltip = defaultedTooltip;
|
||||
summary.userOutput = new DimensionUserOuput(userOutputEncode, schema);
|
||||
return summary;
|
||||
}
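// A rough sketch of the summary produced above, assuming a plain cartesian
// series whose dimensions are named 'x' and 'y' (illustrative only; the real
// result also carries userOutput and dataDimIndicesOnCoord):
function exampleDimensionSummaryShape() {
  return {
    encode: {
      x: ['x'],
      y: ['y'],
      defaultedLabel: ['y'],
      defaultedTooltip: ['y']
    },
    dataDimsOnCoord: ['x', 'y'],
    encodeFirstDimNotExtra: { x: 'x', y: 'y' }
  };
}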
|
||||
function getOrCreateEncodeArr(encode, dim) {
|
||||
if (!encode.hasOwnProperty(dim)) {
|
||||
encode[dim] = [];
|
||||
}
|
||||
return encode[dim];
|
||||
}
|
||||
// FIXME:TS should be type `AxisType`
|
||||
export function getDimensionTypeByAxis(axisType) {
|
||||
return axisType === 'category' ? 'ordinal' : axisType === 'time' ? 'time' : 'float';
|
||||
}
|
||||
function mayLabelDimType(dimType) {
|
||||
  // In most cases, ordinal and time are not suitable for labels.
|
||||
// Ordinal info can be displayed on axis. Time is too long.
|
||||
return !(dimType === 'ordinal' || dimType === 'time');
|
||||
}
|
||||
// function findTheLastDimMayLabel(data) {
|
||||
// // Get last value dim
|
||||
// let dimensions = data.dimensions.slice();
|
||||
// let valueType;
|
||||
// let valueDim;
|
||||
// while (dimensions.length && (
|
||||
// valueDim = dimensions.pop(),
|
||||
// valueType = data.getDimensionInfo(valueDim).type,
|
||||
// valueType === 'ordinal' || valueType === 'time'
|
||||
// )) {} // jshint ignore:line
|
||||
// return valueDim;
|
||||
// }
|
46
frontend/node_modules/echarts/lib/data/helper/linkList.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* AUTO-GENERATED FILE. DO NOT MODIFY.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
// TODO: this module is only for compatibility with echarts-gl
|
||||
import linkSeriesData from './linkSeriesData.js';
|
||||
export default linkSeriesData;
|
150
frontend/node_modules/echarts/lib/data/helper/linkSeriesData.js
generated
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* AUTO-GENERATED FILE. DO NOT MODIFY.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
/**
|
||||
* Link lists and struct (graph or tree)
|
||||
*/
|
||||
import { curry, each, assert, extend, map, keys } from 'zrender/lib/core/util.js';
|
||||
import { makeInner } from '../../util/model.js';
|
||||
var inner = makeInner();
|
||||
function linkSeriesData(opt) {
|
||||
var mainData = opt.mainData;
|
||||
var datas = opt.datas;
|
||||
if (!datas) {
|
||||
datas = {
|
||||
main: mainData
|
||||
};
|
||||
opt.datasAttr = {
|
||||
main: 'data'
|
||||
};
|
||||
}
|
||||
opt.datas = opt.mainData = null;
|
||||
linkAll(mainData, datas, opt);
|
||||
  // Proxy data original methods.
|
||||
each(datas, function (data) {
|
||||
each(mainData.TRANSFERABLE_METHODS, function (methodName) {
|
||||
data.wrapMethod(methodName, curry(transferInjection, opt));
|
||||
});
|
||||
});
|
||||
// Beyond transfer, additional features should be added to `cloneShallow`.
|
||||
mainData.wrapMethod('cloneShallow', curry(cloneShallowInjection, opt));
|
||||
  // Only mainData triggers change, because struct.update may trigger
  // other changeable methods, which may bring about a deadlock.
|
||||
each(mainData.CHANGABLE_METHODS, function (methodName) {
|
||||
mainData.wrapMethod(methodName, curry(changeInjection, opt));
|
||||
});
|
||||
// Make sure datas contains mainData.
|
||||
assert(datas[mainData.dataType] === mainData);
|
||||
}
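// A hypothetical call shape for linkSeriesData (names such as nodeData,
// leafData and tree are assumptions), as used by tree/graph-like structures:
function exampleLinkSeriesDataUsage(nodeData, leafData, tree) {
  // After this call, tree.data points to nodeData, tree.leaves to leafData,
  // and each list gets getLinkedData / getLinkedDataAll attached.
  linkSeriesData({
    mainData: nodeData,
    datas: { node: nodeData, leaf: leafData },
    datasAttr: { node: 'data', leaf: 'leaves' },
    struct: tree,
    structAttr: 'tree'
  });
}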
|
||||
function transferInjection(opt, res) {
|
||||
if (isMainData(this)) {
|
||||
// Transfer datas to new main data.
|
||||
var datas = extend({}, inner(this).datas);
|
||||
datas[this.dataType] = res;
|
||||
linkAll(res, datas, opt);
|
||||
} else {
|
||||
    // Modify the reference in main data to point to newData.
|
||||
linkSingle(res, this.dataType, inner(this).mainData, opt);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
function changeInjection(opt, res) {
|
||||
opt.struct && opt.struct.update();
|
||||
return res;
|
||||
}
|
||||
function cloneShallowInjection(opt, res) {
|
||||
// cloneShallow, which brings about some fragilities, may be inappropriate
|
||||
// to be exposed as an API. So for implementation simplicity we can make
|
||||
  // the restriction that cloneShallow of non-mainData should not be invoked
|
||||
// outside, but only be invoked here.
|
||||
each(inner(res).datas, function (data, dataType) {
|
||||
data !== res && linkSingle(data.cloneShallow(), dataType, res, opt);
|
||||
});
|
||||
return res;
|
||||
}
|
||||
/**
|
||||
* Supplement method to List.
|
||||
*
|
||||
* @public
|
||||
* @param [dataType] If not specified, return mainData.
|
||||
*/
|
||||
function getLinkedData(dataType) {
|
||||
var mainData = inner(this).mainData;
|
||||
return dataType == null || mainData == null ? mainData : inner(mainData).datas[dataType];
|
||||
}
|
||||
/**
|
||||
* Get list of all linked data
|
||||
*/
|
||||
function getLinkedDataAll() {
|
||||
var mainData = inner(this).mainData;
|
||||
return mainData == null ? [{
|
||||
data: mainData
|
||||
}] : map(keys(inner(mainData).datas), function (type) {
|
||||
return {
|
||||
type: type,
|
||||
data: inner(mainData).datas[type]
|
||||
};
|
||||
});
|
||||
}
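// A navigation sketch (assuming lists were linked by linkSeriesData as above,
// with a main list of dataType 'node'):
function exampleLinkedNavigation(leafData) {
  var main = leafData.getLinkedData(); // dataType omitted -> mainData
  var nodes = leafData.getLinkedData('node'); // sibling list by dataType
  return main === nodes;
}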
|
||||
function isMainData(data) {
|
||||
return inner(data).mainData === data;
|
||||
}
|
||||
function linkAll(mainData, datas, opt) {
|
||||
inner(mainData).datas = {};
|
||||
each(datas, function (data, dataType) {
|
||||
linkSingle(data, dataType, mainData, opt);
|
||||
});
|
||||
}
|
||||
function linkSingle(data, dataType, mainData, opt) {
|
||||
inner(mainData).datas[dataType] = data;
|
||||
inner(data).mainData = mainData;
|
||||
data.dataType = dataType;
|
||||
if (opt.struct) {
|
||||
data[opt.structAttr] = opt.struct;
|
||||
opt.struct[opt.datasAttr[dataType]] = data;
|
||||
}
|
||||
// Supplement method.
|
||||
data.getLinkedData = getLinkedData;
|
||||
data.getLinkedDataAll = getLinkedDataAll;
|
||||
}
|
||||
export default linkSeriesData;
|
353
frontend/node_modules/echarts/lib/data/helper/sourceHelper.js
generated
vendored
Normal file
@@ -0,0 +1,353 @@
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* AUTO-GENERATED FILE. DO NOT MODIFY.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { makeInner, getDataItemValue, queryReferringComponents, SINGLE_REFERRING } from '../../util/model.js';
|
||||
import { createHashMap, each, isArray, isString, isObject, isTypedArray } from 'zrender/lib/core/util.js';
|
||||
import { SOURCE_FORMAT_ORIGINAL, SOURCE_FORMAT_ARRAY_ROWS, SOURCE_FORMAT_OBJECT_ROWS, SERIES_LAYOUT_BY_ROW, SOURCE_FORMAT_KEYED_COLUMNS } from '../../util/types.js';
|
||||
// The result of `guessOrdinal`.
|
||||
export var BE_ORDINAL = {
|
||||
Must: 1,
|
||||
Might: 2,
|
||||
Not: 3 // Other cases
|
||||
};
|
||||
var innerGlobalModel = makeInner();
|
||||
/**
|
||||
* MUST be called before mergeOption of all series.
|
||||
*/
|
||||
export function resetSourceDefaulter(ecModel) {
|
||||
// `datasetMap` is used to make default encode.
|
||||
innerGlobalModel(ecModel).datasetMap = createHashMap();
|
||||
}
|
||||
/**
|
||||
 * [The strategy of the arrangement of data dimensions for dataset]:
|
||||
* "value way": all axes are non-category axes. So series one by one take
|
||||
* several (the number is coordSysDims.length) dimensions from dataset.
|
||||
 * The resulting arrangement of data dimensions looks like:
|
||||
* | ser0_x | ser0_y | ser1_x | ser1_y | ser2_x | ser2_y |
|
||||
* "category way": at least one axis is category axis. So the the first data
|
||||
* dimension is always mapped to the first category axis and shared by
|
||||
* all of the series. The other data dimensions are taken by series like
|
||||
* "value way" does.
|
||||
 * The resulting arrangement of data dimensions looks like:
|
||||
* | ser_shared_x | ser0_y | ser1_y | ser2_y |
|
||||
*
|
||||
* @return encode Never be `null/undefined`.
|
||||
*/
|
||||
export function makeSeriesEncodeForAxisCoordSys(coordDimensions, seriesModel, source) {
|
||||
var encode = {};
|
||||
var datasetModel = querySeriesUpstreamDatasetModel(seriesModel);
|
||||
  // Currently only make defaults when using dataset, until more requirements occur.
|
||||
if (!datasetModel || !coordDimensions) {
|
||||
return encode;
|
||||
}
|
||||
var encodeItemName = [];
|
||||
var encodeSeriesName = [];
|
||||
var ecModel = seriesModel.ecModel;
|
||||
var datasetMap = innerGlobalModel(ecModel).datasetMap;
|
||||
var key = datasetModel.uid + '_' + source.seriesLayoutBy;
|
||||
var baseCategoryDimIndex;
|
||||
var categoryWayValueDimStart;
|
||||
coordDimensions = coordDimensions.slice();
|
||||
each(coordDimensions, function (coordDimInfoLoose, coordDimIdx) {
|
||||
var coordDimInfo = isObject(coordDimInfoLoose) ? coordDimInfoLoose : coordDimensions[coordDimIdx] = {
|
||||
name: coordDimInfoLoose
|
||||
};
|
||||
if (coordDimInfo.type === 'ordinal' && baseCategoryDimIndex == null) {
|
||||
baseCategoryDimIndex = coordDimIdx;
|
||||
categoryWayValueDimStart = getDataDimCountOnCoordDim(coordDimInfo);
|
||||
}
|
||||
encode[coordDimInfo.name] = [];
|
||||
});
|
||||
var datasetRecord = datasetMap.get(key) || datasetMap.set(key, {
|
||||
categoryWayDim: categoryWayValueDimStart,
|
||||
valueWayDim: 0
|
||||
});
|
||||
// TODO
|
||||
// Auto detect first time axis and do arrangement.
|
||||
each(coordDimensions, function (coordDimInfo, coordDimIdx) {
|
||||
var coordDimName = coordDimInfo.name;
|
||||
var count = getDataDimCountOnCoordDim(coordDimInfo);
|
||||
// In value way.
|
||||
if (baseCategoryDimIndex == null) {
|
||||
var start = datasetRecord.valueWayDim;
|
||||
pushDim(encode[coordDimName], start, count);
|
||||
pushDim(encodeSeriesName, start, count);
|
||||
datasetRecord.valueWayDim += count;
|
||||
// ??? TODO give a better default series name rule?
|
||||
// especially when encode x y specified.
|
||||
// consider: when multiple series share one dimension
|
||||
// category axis, series name should better use
|
||||
// the other dimension name. On the other hand, use
|
||||
      // both dimension names.
|
||||
}
|
||||
// In category way, the first category axis.
|
||||
else if (baseCategoryDimIndex === coordDimIdx) {
|
||||
pushDim(encode[coordDimName], 0, count);
|
||||
pushDim(encodeItemName, 0, count);
|
||||
}
|
||||
// In category way, the other axis.
|
||||
else {
|
||||
var start = datasetRecord.categoryWayDim;
|
||||
pushDim(encode[coordDimName], start, count);
|
||||
pushDim(encodeSeriesName, start, count);
|
||||
datasetRecord.categoryWayDim += count;
|
||||
}
|
||||
});
|
||||
function pushDim(dimIdxArr, idxFrom, idxCount) {
|
||||
for (var i = 0; i < idxCount; i++) {
|
||||
dimIdxArr.push(idxFrom + i);
|
||||
}
|
||||
}
|
||||
function getDataDimCountOnCoordDim(coordDimInfo) {
|
||||
var dimsDef = coordDimInfo.dimsDef;
|
||||
return dimsDef ? dimsDef.length : 1;
|
||||
}
|
||||
encodeItemName.length && (encode.itemName = encodeItemName);
|
||||
encodeSeriesName.length && (encode.seriesName = encodeSeriesName);
|
||||
return encode;
|
||||
}
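// A sketch of the default encodes produced by the "category way" described
// above, assuming dataset columns [category, series0-value, series1-value]
// and x being the category axis (illustrative values, not computed here):
function exampleCategoryWayEncodes() {
  // Both series share column 0 on the category axis; value columns are
  // handed out one by one via datasetRecord.categoryWayDim.
  var series0 = { x: [0], y: [1], itemName: [0], seriesName: [1] };
  var series1 = { x: [0], y: [2], itemName: [0], seriesName: [2] };
  return [series0, series1];
}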
|
||||
/**
|
||||
* Work for data like [{name: ..., value: ...}, ...].
|
||||
*
|
||||
* @return encode Never be `null/undefined`.
|
||||
*/
|
||||
export function makeSeriesEncodeForNameBased(seriesModel, source, dimCount) {
|
||||
var encode = {};
|
||||
var datasetModel = querySeriesUpstreamDatasetModel(seriesModel);
|
||||
  // Currently only make defaults when using dataset, until more requirements occur.
|
||||
if (!datasetModel) {
|
||||
return encode;
|
||||
}
|
||||
var sourceFormat = source.sourceFormat;
|
||||
var dimensionsDefine = source.dimensionsDefine;
|
||||
var potentialNameDimIndex;
|
||||
if (sourceFormat === SOURCE_FORMAT_OBJECT_ROWS || sourceFormat === SOURCE_FORMAT_KEYED_COLUMNS) {
|
||||
each(dimensionsDefine, function (dim, idx) {
|
||||
if ((isObject(dim) ? dim.name : dim) === 'name') {
|
||||
potentialNameDimIndex = idx;
|
||||
}
|
||||
});
|
||||
}
|
||||
var idxResult = function () {
|
||||
var idxRes0 = {};
|
||||
var idxRes1 = {};
|
||||
var guessRecords = [];
|
||||
    // 5 is an empirical value.
|
||||
for (var i = 0, len = Math.min(5, dimCount); i < len; i++) {
|
||||
var guessResult = doGuessOrdinal(source.data, sourceFormat, source.seriesLayoutBy, dimensionsDefine, source.startIndex, i);
|
||||
guessRecords.push(guessResult);
|
||||
var isPureNumber = guessResult === BE_ORDINAL.Not;
|
||||
// [Strategy of idxRes0]: find the first BE_ORDINAL.Not as the value dim,
|
||||
// and then find a name dim with the priority:
|
||||
// "BE_ORDINAL.Might|BE_ORDINAL.Must" > "other dim" > "the value dim itself".
|
||||
if (isPureNumber && idxRes0.v == null && i !== potentialNameDimIndex) {
|
||||
idxRes0.v = i;
|
||||
}
|
||||
if (idxRes0.n == null || idxRes0.n === idxRes0.v || !isPureNumber && guessRecords[idxRes0.n] === BE_ORDINAL.Not) {
|
||||
idxRes0.n = i;
|
||||
}
|
||||
if (fulfilled(idxRes0) && guessRecords[idxRes0.n] !== BE_ORDINAL.Not) {
|
||||
return idxRes0;
|
||||
}
|
||||
// [Strategy of idxRes1]: if idxRes0 not satisfied (that is, no BE_ORDINAL.Not),
|
||||
// find the first BE_ORDINAL.Might as the value dim,
|
||||
// and then find a name dim with the priority:
|
||||
// "other dim" > "the value dim itself".
|
||||
// That is for backward compat: number-like (e.g., `'3'`, `'55'`) can be
|
||||
// treated as number.
|
||||
if (!isPureNumber) {
|
||||
if (guessResult === BE_ORDINAL.Might && idxRes1.v == null && i !== potentialNameDimIndex) {
|
||||
idxRes1.v = i;
|
||||
}
|
||||
if (idxRes1.n == null || idxRes1.n === idxRes1.v) {
|
||||
idxRes1.n = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
function fulfilled(idxResult) {
|
||||
return idxResult.v != null && idxResult.n != null;
|
||||
}
|
||||
return fulfilled(idxRes0) ? idxRes0 : fulfilled(idxRes1) ? idxRes1 : null;
|
||||
}();
|
||||
if (idxResult) {
|
||||
encode.value = [idxResult.v];
|
||||
// `potentialNameDimIndex` has highest priority.
|
||||
var nameDimIndex = potentialNameDimIndex != null ? potentialNameDimIndex : idxResult.n;
|
||||
// By default, label uses itemName in charts.
|
||||
// So we don't set encodeLabel here.
|
||||
encode.itemName = [nameDimIndex];
|
||||
encode.seriesName = [nameDimIndex];
|
||||
}
|
||||
return encode;
|
||||
}
|
||||
/**
|
||||
 * @return If null/undefined is returned, it indicates that datasetModel should not be used.
|
||||
*/
|
||||
export function querySeriesUpstreamDatasetModel(seriesModel) {
|
||||
// Caution: consider the scenario:
|
||||
// A dataset is declared and a series is not expected to use the dataset,
|
||||
  // and at the beginning `setOption({series: { noData }})` (just prepare other
  // option but no data), then `setOption({series: {data: [...]}})`. In this case,
|
||||
// the user should set an empty array to avoid that dataset is used by default.
|
||||
var thisData = seriesModel.get('data', true);
|
||||
if (!thisData) {
|
||||
return queryReferringComponents(seriesModel.ecModel, 'dataset', {
|
||||
index: seriesModel.get('datasetIndex', true),
|
||||
id: seriesModel.get('datasetId', true)
|
||||
}, SINGLE_REFERRING).models[0];
|
||||
}
|
||||
}
|
||||
/**
|
||||
 * @return Always returns an array, even if empty.
|
||||
*/
|
||||
export function queryDatasetUpstreamDatasetModels(datasetModel) {
|
||||
  // Only when these attributes are declared do we default to referencing `datasetIndex: 0`.
|
||||
// Otherwise, no reference.
|
||||
if (!datasetModel.get('transform', true) && !datasetModel.get('fromTransformResult', true)) {
|
||||
return [];
|
||||
}
|
||||
return queryReferringComponents(datasetModel.ecModel, 'dataset', {
|
||||
index: datasetModel.get('fromDatasetIndex', true),
|
||||
id: datasetModel.get('fromDatasetId', true)
|
||||
}, SINGLE_REFERRING).models;
|
||||
}
|
||||
/**
|
||||
 * The rule should not be complex, otherwise users might not
 * be able to know where the data is wrong.
|
||||
* The code is ugly, but how to make it neat?
|
||||
*/
|
||||
export function guessOrdinal(source, dimIndex) {
|
||||
return doGuessOrdinal(source.data, source.sourceFormat, source.seriesLayoutBy, source.dimensionsDefine, source.startIndex, dimIndex);
|
||||
}
|
||||
// dimIndex may be overflow source data.
|
||||
// return {BE_ORDINAL}
|
||||
function doGuessOrdinal(data, sourceFormat, seriesLayoutBy, dimensionsDefine, startIndex, dimIndex) {
|
||||
var result;
|
||||
// Experience value.
|
||||
var maxLoop = 5;
|
||||
if (isTypedArray(data)) {
|
||||
return BE_ORDINAL.Not;
|
||||
}
|
||||
// When sourceType is 'objectRows' or 'keyedColumns', dimensionsDefine
|
||||
// always exists in source.
|
||||
var dimName;
|
||||
var dimType;
|
||||
if (dimensionsDefine) {
|
||||
var dimDefItem = dimensionsDefine[dimIndex];
|
||||
if (isObject(dimDefItem)) {
|
||||
dimName = dimDefItem.name;
|
||||
dimType = dimDefItem.type;
|
||||
} else if (isString(dimDefItem)) {
|
||||
dimName = dimDefItem;
|
||||
}
|
||||
}
|
||||
if (dimType != null) {
|
||||
return dimType === 'ordinal' ? BE_ORDINAL.Must : BE_ORDINAL.Not;
|
||||
}
|
||||
if (sourceFormat === SOURCE_FORMAT_ARRAY_ROWS) {
|
||||
var dataArrayRows = data;
|
||||
if (seriesLayoutBy === SERIES_LAYOUT_BY_ROW) {
|
||||
var sample = dataArrayRows[dimIndex];
|
||||
for (var i = 0; i < (sample || []).length && i < maxLoop; i++) {
|
||||
if ((result = detectValue(sample[startIndex + i])) != null) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (var i = 0; i < dataArrayRows.length && i < maxLoop; i++) {
|
||||
var row = dataArrayRows[startIndex + i];
|
||||
if (row && (result = detectValue(row[dimIndex])) != null) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (sourceFormat === SOURCE_FORMAT_OBJECT_ROWS) {
|
||||
var dataObjectRows = data;
|
||||
if (!dimName) {
|
||||
return BE_ORDINAL.Not;
|
||||
}
|
||||
for (var i = 0; i < dataObjectRows.length && i < maxLoop; i++) {
|
||||
var item = dataObjectRows[i];
|
||||
if (item && (result = detectValue(item[dimName])) != null) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
} else if (sourceFormat === SOURCE_FORMAT_KEYED_COLUMNS) {
|
||||
var dataKeyedColumns = data;
|
||||
if (!dimName) {
|
||||
return BE_ORDINAL.Not;
|
||||
}
|
||||
var sample = dataKeyedColumns[dimName];
|
||||
if (!sample || isTypedArray(sample)) {
|
||||
return BE_ORDINAL.Not;
|
||||
}
|
||||
for (var i = 0; i < sample.length && i < maxLoop; i++) {
|
||||
if ((result = detectValue(sample[i])) != null) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
} else if (sourceFormat === SOURCE_FORMAT_ORIGINAL) {
|
||||
var dataOriginal = data;
|
||||
for (var i = 0; i < dataOriginal.length && i < maxLoop; i++) {
|
||||
var item = dataOriginal[i];
|
||||
var val = getDataItemValue(item);
|
||||
if (!isArray(val)) {
|
||||
return BE_ORDINAL.Not;
|
||||
}
|
||||
if ((result = detectValue(val[dimIndex])) != null) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
function detectValue(val) {
|
||||
var beStr = isString(val);
|
||||
    // For usage convenience, '1' and '2' will be treated as "number".
|
||||
// `Number('')` (or any whitespace) is `0`.
|
||||
if (val != null && Number.isFinite(Number(val)) && val !== '') {
|
||||
return beStr ? BE_ORDINAL.Might : BE_ORDINAL.Not;
|
||||
} else if (beStr && val !== '-') {
|
||||
return BE_ORDINAL.Must;
|
||||
}
|
||||
}
|
||||
return BE_ORDINAL.Not;
|
||||
}
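// Expected guesses for typical columns, summarized as an illustrative mapping
// (the sample values in the comments are assumptions, not module data):
function exampleGuessOrdinalExpectations() {
  return {
    nonNumericStringColumn: BE_ORDINAL.Must, // e.g. 'Mon', 'Tue'
    plainNumberColumn: BE_ORDINAL.Not, // e.g. 12, 21
    numericStringColumn: BE_ORDINAL.Might // e.g. '1', '2'
  };
}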
|
417
frontend/node_modules/echarts/lib/data/helper/sourceManager.js
generated
vendored
Normal file
@@ -0,0 +1,417 @@
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* AUTO-GENERATED FILE. DO NOT MODIFY.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { setAsPrimitive, map, isTypedArray, assert, each, retrieve2 } from 'zrender/lib/core/util.js';
|
||||
import { createSource, cloneSourceShallow } from '../Source.js';
|
||||
import { SOURCE_FORMAT_TYPED_ARRAY, SOURCE_FORMAT_ORIGINAL } from '../../util/types.js';
|
||||
import { querySeriesUpstreamDatasetModel, queryDatasetUpstreamDatasetModels } from './sourceHelper.js';
|
||||
import { applyDataTransform } from './transform.js';
|
||||
import DataStore from '../DataStore.js';
|
||||
import { DefaultDataProvider } from './dataProvider.js';
|
||||
/**
|
||||
* [REQUIREMENT_MEMO]:
|
||||
* (0) `metaRawOption` means `dimensions`/`sourceHeader`/`seriesLayoutBy` in raw option.
|
||||
* (1) Keep support the feature: `metaRawOption` can be specified both on `series` and
|
||||
 * `root-dataset`. The one on `series` has higher priority.
|
||||
* (2) Do not support to set `metaRawOption` on a `non-root-dataset`, because it might
|
||||
* confuse users: whether those props indicate how to visit the upstream source or visit
|
||||
 * the transform result source, and some transforms have nothing to do with these props,
 * and some transforms might have multiple upstreams.
|
||||
* (3) Transforms should specify `metaRawOption` in each output, just like they can be
|
||||
* declared in `root-dataset`.
|
||||
* (4) At present only support visit source in `SERIES_LAYOUT_BY_COLUMN` in transforms.
|
||||
* That is for reducing complexity in transforms.
|
||||
* PENDING: Whether to provide transposition transform?
|
||||
*
|
||||
 * [IMPLEMENTATION_MEMO]:
|
||||
* "sourceVisitConfig" are calculated from `metaRawOption` and `data`.
|
||||
* They will not be calculated until `source` is about to be visited (to prevent from
|
||||
 * duplicate calculation). `source` is visited only in series and input to transforms.
|
||||
*
|
||||
* [DIMENSION_INHERIT_RULE]:
|
||||
 * By default the dimensions are inherited from ancestors, unless a transform returns
|
||||
* a new dimensions definition.
|
||||
* Consider the case:
|
||||
* ```js
|
||||
* dataset: [{
|
||||
* source: [ ['Product', 'Sales', 'Prise'], ['Cookies', 321, 44.21], ...]
|
||||
* }, {
|
||||
* transform: { type: 'filter', ... }
|
||||
* }]
|
||||
* dataset: [{
|
||||
 * dimensions: ['Product', 'Sales', 'Prise'],
|
||||
* source: [ ['Cookies', 321, 44.21], ...]
|
||||
* }, {
|
||||
* transform: { type: 'filter', ... }
|
||||
* }]
|
||||
* ```
|
||||
* The two types of option should have the same behavior after transform.
|
||||
*
|
||||
*
|
||||
* [SCENARIO]:
|
||||
* (1) Provide source data directly:
|
||||
* ```js
|
||||
* series: {
|
||||
* encode: {...},
|
||||
* dimensions: [...]
|
||||
* seriesLayoutBy: 'row',
|
||||
* data: [[...]]
|
||||
* }
|
||||
* ```
|
||||
* (2) Series refer to dataset.
|
||||
* ```js
|
||||
* series: [{
|
||||
* encode: {...}
|
||||
* // Ignore datasetIndex means `datasetIndex: 0`
|
||||
 * // and the dimensions definition in dataset is used
|
||||
* }, {
|
||||
* encode: {...},
|
||||
* seriesLayoutBy: 'column',
|
||||
* datasetIndex: 1
|
||||
* }]
|
||||
* ```
|
||||
* (3) dataset transform
|
||||
* ```js
|
||||
* dataset: [{
|
||||
* source: [...]
|
||||
* }, {
|
||||
* source: [...]
|
||||
* }, {
|
||||
* // By default from 0.
|
||||
* transform: { type: 'filter', config: {...} }
|
||||
* }, {
|
||||
* // Piped.
|
||||
* transform: [
|
||||
* { type: 'filter', config: {...} },
|
||||
* { type: 'sort', config: {...} }
|
||||
* ]
|
||||
* }, {
|
||||
* id: 'regressionData',
|
||||
* fromDatasetIndex: 1,
|
||||
* // Third-party transform
|
||||
* transform: { type: 'ecStat:regression', config: {...} }
|
||||
* }, {
|
||||
* // retrieve the extra result.
|
||||
* id: 'regressionFormula',
|
||||
* fromDatasetId: 'regressionData',
|
||||
* fromTransformResult: 1
|
||||
* }]
|
||||
* ```
|
||||
*/
|
||||
var SourceManager = /** @class */function () {
|
||||
function SourceManager(sourceHost) {
|
||||
// Cached source. Do not repeat calculating if not dirty.
|
||||
this._sourceList = [];
|
||||
this._storeList = [];
|
||||
// version sign of each upstream source manager.
|
||||
this._upstreamSignList = [];
|
||||
this._versionSignBase = 0;
|
||||
this._dirty = true;
|
||||
this._sourceHost = sourceHost;
|
||||
}
|
||||
/**
|
||||
* Mark dirty.
|
||||
*/
|
||||
SourceManager.prototype.dirty = function () {
|
||||
this._setLocalSource([], []);
|
||||
this._storeList = [];
|
||||
this._dirty = true;
|
||||
};
|
||||
SourceManager.prototype._setLocalSource = function (sourceList, upstreamSignList) {
|
||||
this._sourceList = sourceList;
|
||||
this._upstreamSignList = upstreamSignList;
|
||||
this._versionSignBase++;
|
||||
if (this._versionSignBase > 9e10) {
|
||||
this._versionSignBase = 0;
|
||||
}
|
||||
};
|
||||
/**
|
||||
* For detecting whether the upstream source is dirty, so that
|
||||
* the local cached source (in `_sourceList`) should be discarded.
|
||||
*/
|
||||
SourceManager.prototype._getVersionSign = function () {
|
||||
return this._sourceHost.uid + '_' + this._versionSignBase;
|
||||
};
|
||||
/**
|
||||
* Always return a source instance. Otherwise throw error.
|
||||
*/
|
||||
SourceManager.prototype.prepareSource = function () {
|
||||
    // For the case of calling `setOption` multiple times with no data changed,
|
||||
// cache the result source to prevent from repeating transform.
|
||||
if (this._isDirty()) {
|
||||
this._createSource();
|
||||
this._dirty = false;
|
||||
}
|
||||
};
|
||||
SourceManager.prototype._createSource = function () {
|
||||
this._setLocalSource([], []);
|
||||
var sourceHost = this._sourceHost;
|
||||
var upSourceMgrList = this._getUpstreamSourceManagers();
|
||||
var hasUpstream = !!upSourceMgrList.length;
|
||||
var resultSourceList;
|
||||
var upstreamSignList;
|
||||
if (isSeries(sourceHost)) {
|
||||
var seriesModel = sourceHost;
|
||||
var data = void 0;
|
||||
var sourceFormat = void 0;
|
||||
var upSource = void 0;
|
||||
// Has upstream dataset
|
||||
if (hasUpstream) {
|
||||
var upSourceMgr = upSourceMgrList[0];
|
||||
upSourceMgr.prepareSource();
|
||||
upSource = upSourceMgr.getSource();
|
||||
data = upSource.data;
|
||||
sourceFormat = upSource.sourceFormat;
|
||||
upstreamSignList = [upSourceMgr._getVersionSign()];
|
||||
}
|
||||
// Series data is from own.
|
||||
else {
|
||||
data = seriesModel.get('data', true);
|
||||
sourceFormat = isTypedArray(data) ? SOURCE_FORMAT_TYPED_ARRAY : SOURCE_FORMAT_ORIGINAL;
|
||||
upstreamSignList = [];
|
||||
}
|
||||
// See [REQUIREMENT_MEMO], merge settings on series and parent dataset if it is root.
|
||||
var newMetaRawOption = this._getSourceMetaRawOption() || {};
|
||||
var upMetaRawOption = upSource && upSource.metaRawOption || {};
|
||||
var seriesLayoutBy = retrieve2(newMetaRawOption.seriesLayoutBy, upMetaRawOption.seriesLayoutBy) || null;
|
||||
var sourceHeader = retrieve2(newMetaRawOption.sourceHeader, upMetaRawOption.sourceHeader);
|
||||
// Note here we should not use `upSource.dimensionsDefine`. Consider the case:
|
||||
// `upSource.dimensionsDefine` is detected by `seriesLayoutBy: 'column'`,
|
||||
// but series need `seriesLayoutBy: 'row'`.
|
||||
var dimensions = retrieve2(newMetaRawOption.dimensions, upMetaRawOption.dimensions);
|
||||
// We share source with dataset as much as possible
|
||||
// to avoid extra memory cost of high dimensional data.
|
||||
var needsCreateSource = seriesLayoutBy !== upMetaRawOption.seriesLayoutBy || !!sourceHeader !== !!upMetaRawOption.sourceHeader || dimensions;
|
||||
resultSourceList = needsCreateSource ? [createSource(data, {
|
||||
seriesLayoutBy: seriesLayoutBy,
|
||||
sourceHeader: sourceHeader,
|
||||
dimensions: dimensions
|
||||
}, sourceFormat)] : [];
|
||||
} else {
|
||||
var datasetModel = sourceHost;
|
||||
// Has upstream dataset.
|
||||
if (hasUpstream) {
|
||||
var result = this._applyTransform(upSourceMgrList);
|
||||
resultSourceList = result.sourceList;
|
||||
upstreamSignList = result.upstreamSignList;
|
||||
}
|
||||
// Is root dataset.
|
||||
else {
|
||||
var sourceData = datasetModel.get('source', true);
|
||||
resultSourceList = [createSource(sourceData, this._getSourceMetaRawOption(), null)];
|
||||
upstreamSignList = [];
|
||||
}
|
||||
}
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(resultSourceList && upstreamSignList);
|
||||
}
|
||||
this._setLocalSource(resultSourceList, upstreamSignList);
|
||||
};
|
||||
SourceManager.prototype._applyTransform = function (upMgrList) {
|
||||
var datasetModel = this._sourceHost;
|
||||
var transformOption = datasetModel.get('transform', true);
|
||||
var fromTransformResult = datasetModel.get('fromTransformResult', true);
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(fromTransformResult != null || transformOption != null);
|
||||
}
|
||||
if (fromTransformResult != null) {
|
||||
var errMsg = '';
|
||||
if (upMgrList.length !== 1) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
errMsg = 'When using `fromTransformResult`, there should be only one upstream dataset';
|
||||
}
|
||||
doThrow(errMsg);
|
||||
}
|
||||
}
|
||||
var sourceList;
|
||||
var upSourceList = [];
|
||||
var upstreamSignList = [];
|
||||
each(upMgrList, function (upMgr) {
|
||||
upMgr.prepareSource();
|
||||
var upSource = upMgr.getSource(fromTransformResult || 0);
|
||||
var errMsg = '';
|
||||
if (fromTransformResult != null && !upSource) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
errMsg = 'Can not retrieve result by `fromTransformResult`: ' + fromTransformResult;
|
||||
}
|
||||
doThrow(errMsg);
|
||||
}
|
||||
upSourceList.push(upSource);
|
||||
upstreamSignList.push(upMgr._getVersionSign());
|
||||
});
|
||||
if (transformOption) {
|
||||
sourceList = applyDataTransform(transformOption, upSourceList, {
|
||||
datasetIndex: datasetModel.componentIndex
|
||||
});
|
||||
} else if (fromTransformResult != null) {
|
||||
sourceList = [cloneSourceShallow(upSourceList[0])];
|
||||
}
|
||||
return {
|
||||
sourceList: sourceList,
|
||||
upstreamSignList: upstreamSignList
|
||||
};
|
||||
};
|
||||
SourceManager.prototype._isDirty = function () {
|
||||
if (this._dirty) {
|
||||
return true;
|
||||
}
|
||||
    // All sourceList is from the same upstream.
|
||||
var upSourceMgrList = this._getUpstreamSourceManagers();
|
||||
for (var i = 0; i < upSourceMgrList.length; i++) {
|
||||
var upSrcMgr = upSourceMgrList[i];
|
||||
if (
|
||||
      // Consider the case that an ancestor is dirty; call it recursively.
|
||||
// The performance is probably not an issue because usually the chain is not long.
|
||||
upSrcMgr._isDirty() || this._upstreamSignList[i] !== upSrcMgr._getVersionSign()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
};
|
||||
/**
|
||||
* @param sourceIndex By default 0, means "main source".
|
||||
* In most cases there is only one source.
|
||||
*/
|
||||
SourceManager.prototype.getSource = function (sourceIndex) {
|
||||
sourceIndex = sourceIndex || 0;
|
||||
var source = this._sourceList[sourceIndex];
|
||||
if (!source) {
|
||||
// Series may share source instance with dataset.
|
||||
var upSourceMgrList = this._getUpstreamSourceManagers();
|
||||
return upSourceMgrList[0] && upSourceMgrList[0].getSource(sourceIndex);
|
||||
}
|
||||
return source;
|
||||
};
|
||||
/**
|
||||
*
|
||||
* Get a data store which can be shared across series.
|
||||
* Only available for series.
|
||||
*
|
||||
* @param seriesDimRequest Dimensions that are generated in series.
|
||||
* Should have been sorted by `storeDimIndex` asc.
|
||||
*/
|
||||
SourceManager.prototype.getSharedDataStore = function (seriesDimRequest) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert(isSeries(this._sourceHost), 'Can only call getDataStore on series source manager.');
|
||||
}
|
||||
var schema = seriesDimRequest.makeStoreSchema();
|
||||
return this._innerGetDataStore(schema.dimensions, seriesDimRequest.source, schema.hash);
|
||||
};
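  // A hypothetical call site (it assumes the series model exposes
  // getSourceManager() and that `schema` is the SeriesDataSchema the
  // assertion above expects):
  function exampleGetSharedDataStore(seriesModel, schema) {
    var sourceManager = seriesModel.getSourceManager();
    sourceManager.prepareSource();
    return sourceManager.getSharedDataStore(schema);
  }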
|
||||
SourceManager.prototype._innerGetDataStore = function (storeDims, seriesSource, sourceReadKey) {
|
||||
// TODO Can use other sourceIndex?
|
||||
var sourceIndex = 0;
|
||||
var storeList = this._storeList;
|
||||
var cachedStoreMap = storeList[sourceIndex];
|
||||
if (!cachedStoreMap) {
|
||||
cachedStoreMap = storeList[sourceIndex] = {};
|
||||
}
|
||||
var cachedStore = cachedStoreMap[sourceReadKey];
|
||||
if (!cachedStore) {
|
||||
var upSourceMgr = this._getUpstreamSourceManagers()[0];
|
||||
if (isSeries(this._sourceHost) && upSourceMgr) {
|
||||
cachedStore = upSourceMgr._innerGetDataStore(storeDims, seriesSource, sourceReadKey);
|
||||
} else {
|
||||
cachedStore = new DataStore();
|
||||
// Always create store from source of series.
|
||||
cachedStore.initData(new DefaultDataProvider(seriesSource, storeDims.length), storeDims);
|
||||
}
|
||||
cachedStoreMap[sourceReadKey] = cachedStore;
|
||||
}
|
||||
return cachedStore;
|
||||
};
|
||||
/**
|
||||
* PENDING: Is it fast enough?
|
||||
* If no upstream, return empty array.
|
||||
*/
|
||||
SourceManager.prototype._getUpstreamSourceManagers = function () {
|
||||
// Always get the relationship from the raw option.
|
||||
// Do not cache the link of the dependency graph, so that
|
||||
// there is no need to update them when change happens.
|
||||
var sourceHost = this._sourceHost;
|
||||
if (isSeries(sourceHost)) {
|
||||
var datasetModel = querySeriesUpstreamDatasetModel(sourceHost);
|
||||
return !datasetModel ? [] : [datasetModel.getSourceManager()];
|
||||
} else {
|
||||
return map(queryDatasetUpstreamDatasetModels(sourceHost), function (datasetModel) {
|
||||
return datasetModel.getSourceManager();
|
||||
});
|
||||
}
|
||||
};
|
||||
SourceManager.prototype._getSourceMetaRawOption = function () {
|
||||
var sourceHost = this._sourceHost;
|
||||
var seriesLayoutBy;
|
||||
var sourceHeader;
|
||||
var dimensions;
|
||||
if (isSeries(sourceHost)) {
|
||||
seriesLayoutBy = sourceHost.get('seriesLayoutBy', true);
|
||||
sourceHeader = sourceHost.get('sourceHeader', true);
|
||||
dimensions = sourceHost.get('dimensions', true);
|
||||
}
|
||||
    // See [REQUIREMENT_MEMO], `non-root-dataset` does not support them.
|
||||
else if (!this._getUpstreamSourceManagers().length) {
|
||||
var model = sourceHost;
|
||||
seriesLayoutBy = model.get('seriesLayoutBy', true);
|
||||
sourceHeader = model.get('sourceHeader', true);
|
||||
dimensions = model.get('dimensions', true);
|
||||
}
|
||||
return {
|
||||
seriesLayoutBy: seriesLayoutBy,
|
||||
sourceHeader: sourceHeader,
|
||||
dimensions: dimensions
|
||||
};
|
||||
};
|
||||
return SourceManager;
|
||||
}();
|
||||
export { SourceManager };
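// A rough lifecycle sketch for the manager above (the host wiring is assumed;
// only methods shown in this module are used):
function exampleSourceManagerLifecycle(sourceManager) {
  // Invalidate cached sources (e.g. after an option change), then rebuild lazily.
  sourceManager.dirty();
  sourceManager.prepareSource();
  // Index 0 (or omitted) is the "main source"; transforms may expose more.
  return sourceManager.getSource();
}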
|
||||
// Call this method after `super.init` and `super.mergeOption` to
|
||||
// disable the transform merge, but do not disable transform clone from rawOption.
|
||||
export function disableTransformOptionMerge(datasetModel) {
|
||||
var transformOption = datasetModel.option.transform;
|
||||
transformOption && setAsPrimitive(datasetModel.option.transform);
|
||||
}
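// A hypothetical place for the call above (the surrounding model shape is an
// assumption); per the comment, it belongs after `super.init`/`super.mergeOption`:
function exampleDisableTransformMerge(datasetModel) {
  // Later option merges will then not deep-merge the transform config,
  // while cloning it from the raw option still works.
  disableTransformOptionMerge(datasetModel);
}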
|
||||
function isSeries(sourceHost) {
|
||||
// Avoid circular dependency with Series.ts
|
||||
return sourceHost.mainType === 'series';
|
||||
}
|
||||
function doThrow(errMsg) {
|
||||
throw new Error(errMsg);
|
||||
}
|
417
frontend/node_modules/echarts/lib/data/helper/transform.js
generated
vendored
Normal file
@@ -0,0 +1,417 @@
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* AUTO-GENERATED FILE. DO NOT MODIFY.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { SERIES_LAYOUT_BY_COLUMN, SOURCE_FORMAT_OBJECT_ROWS, SOURCE_FORMAT_ARRAY_ROWS } from '../../util/types.js';
|
||||
import { normalizeToArray } from '../../util/model.js';
|
||||
import { createHashMap, bind, each, hasOwn, map, clone, isObject, extend, isNumber } from 'zrender/lib/core/util.js';
|
||||
import { getRawSourceItemGetter, getRawSourceDataCounter, getRawSourceValueGetter } from './dataProvider.js';
|
||||
import { parseDataValue } from './dataValueHelper.js';
|
||||
import { log, makePrintable, throwError } from '../../util/log.js';
|
||||
import { createSource, detectSourceFormat } from '../Source.js';
|
||||
/**
|
||||
* TODO: disable writable.
|
||||
* This structure will be exposed to users.
|
||||
*/
|
||||
var ExternalSource = /** @class */function () {
|
||||
function ExternalSource() {}
|
||||
ExternalSource.prototype.getRawData = function () {
|
||||
// Only built-in transform available.
|
||||
throw new Error('not supported');
|
||||
};
|
||||
ExternalSource.prototype.getRawDataItem = function (dataIndex) {
|
||||
// Only built-in transform available.
|
||||
throw new Error('not supported');
|
||||
};
|
||||
ExternalSource.prototype.cloneRawData = function () {
|
||||
return;
|
||||
};
|
||||
/**
|
||||
* @return If dimension not found, return null/undefined.
|
||||
*/
|
||||
ExternalSource.prototype.getDimensionInfo = function (dim) {
|
||||
return;
|
||||
};
|
||||
/**
|
||||
* dimensions defined if and only if either:
|
||||
* (a) dataset.dimensions are declared.
|
||||
   * (b) dataset data includes dimension definitions in data (detected or via specified `sourceHeader`).
|
||||
   * If dimensions are defined, `dimensionInfoAll` corresponds to
|
||||
* the defined dimensions.
|
||||
* Otherwise, `dimensionInfoAll` is determined by data columns.
|
||||
* @return Always return an array (even empty array).
|
||||
*/
|
||||
ExternalSource.prototype.cloneAllDimensionInfo = function () {
|
||||
return;
|
||||
};
|
||||
ExternalSource.prototype.count = function () {
|
||||
return;
|
||||
};
|
||||
/**
|
||||
* Only support by dimension index.
|
||||
* No need to support by dimension name in transform function,
|
||||
* because transform function is not case-specific, no need to use name literally.
|
||||
*/
|
||||
ExternalSource.prototype.retrieveValue = function (dataIndex, dimIndex) {
|
||||
return;
|
||||
};
|
||||
ExternalSource.prototype.retrieveValueFromItem = function (dataItem, dimIndex) {
|
||||
return;
|
||||
};
|
||||
ExternalSource.prototype.convertValue = function (rawVal, dimInfo) {
|
||||
return parseDataValue(rawVal, dimInfo);
|
||||
};
|
||||
return ExternalSource;
|
||||
}();
|
||||
export { ExternalSource };
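// A sketch of how a transform body might read its upstream through the
// interface above (the callback wiring is an assumption; only ExternalSource
// methods implemented below are relied on, and at least two dimensions are assumed):
function exampleReadUpstream(upstream) {
  var rows = [];
  for (var i = 0, len = upstream.count(); i < len; i++) {
    // Values are addressed by dimension index, as documented above.
    rows.push([upstream.retrieveValue(i, 0), upstream.retrieveValue(i, 1)]);
  }
  return rows;
}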
|
||||
function createExternalSource(internalSource, externalTransform) {
|
||||
var extSource = new ExternalSource();
|
||||
var data = internalSource.data;
|
||||
var sourceFormat = extSource.sourceFormat = internalSource.sourceFormat;
|
||||
var sourceHeaderCount = internalSource.startIndex;
|
||||
var errMsg = '';
|
||||
if (internalSource.seriesLayoutBy !== SERIES_LAYOUT_BY_COLUMN) {
|
||||
    // For logic simplicity in the transformer, only 'column' is
|
||||
// supported in data transform. Otherwise, the `dimensionsDefine`
|
||||
// might be detected by 'row', which probably confuses users.
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
errMsg = '`seriesLayoutBy` of upstream dataset can only be "column" in data transform.';
|
||||
}
|
||||
throwError(errMsg);
|
||||
}
|
||||
// [MEMO]
|
||||
// Create a new dimensions structure for exposing.
|
||||
// Do not expose all dimension info to users directly.
|
||||
  // Because the dimensions are probably auto detected from data and might not be reliable.
  // We should not lead the transformers to think they are reliable and return them as-is.
|
||||
// See [DIMENSION_INHERIT_RULE] in `sourceManager.ts`.
|
||||
var dimensions = [];
|
||||
var dimsByName = {};
|
||||
var dimsDef = internalSource.dimensionsDefine;
|
||||
if (dimsDef) {
|
||||
each(dimsDef, function (dimDef, idx) {
|
||||
var name = dimDef.name;
|
||||
var dimDefExt = {
|
||||
index: idx,
|
||||
name: name,
|
||||
displayName: dimDef.displayName
|
||||
};
|
||||
dimensions.push(dimDefExt);
|
||||
// Users probably do not specify dimension name. For simplicity, data transform
|
||||
// does not generate dimension name.
|
||||
if (name != null) {
|
||||
// Dimension name should not be duplicated.
|
||||
// For simplicity, data transform forbids name duplication, do not generate
|
||||
// new name like module `completeDimensions.ts` did, but just tell users.
|
||||
var errMsg_1 = '';
|
||||
if (hasOwn(dimsByName, name)) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
errMsg_1 = 'dimension name "' + name + '" duplicated.';
|
||||
}
|
||||
throwError(errMsg_1);
|
||||
}
|
||||
dimsByName[name] = dimDefExt;
|
||||
}
|
||||
});
|
||||
}
|
||||
  // If dimension definitions are not defined and cannot be detected.
|
||||
// e.g., pure data `[[11, 22], ...]`.
|
||||
else {
|
||||
    for (var i = 0; i < (internalSource.dimensionsDetectedCount || 0); i++) {
|
||||
      // Do not generate names or anything else. The subsequent processing in
      // `transform` or `series` probably has its own name generation strategy.
|
||||
dimensions.push({
|
||||
index: i
|
||||
});
|
||||
}
|
||||
}
|
||||
// Implement public methods:
|
||||
var rawItemGetter = getRawSourceItemGetter(sourceFormat, SERIES_LAYOUT_BY_COLUMN);
|
||||
if (externalTransform.__isBuiltIn) {
|
||||
extSource.getRawDataItem = function (dataIndex) {
|
||||
return rawItemGetter(data, sourceHeaderCount, dimensions, dataIndex);
|
||||
};
|
||||
extSource.getRawData = bind(getRawData, null, internalSource);
|
||||
}
|
||||
extSource.cloneRawData = bind(cloneRawData, null, internalSource);
|
||||
var rawCounter = getRawSourceDataCounter(sourceFormat, SERIES_LAYOUT_BY_COLUMN);
|
||||
extSource.count = bind(rawCounter, null, data, sourceHeaderCount, dimensions);
|
||||
var rawValueGetter = getRawSourceValueGetter(sourceFormat);
|
||||
extSource.retrieveValue = function (dataIndex, dimIndex) {
|
||||
var rawItem = rawItemGetter(data, sourceHeaderCount, dimensions, dataIndex);
|
||||
return retrieveValueFromItem(rawItem, dimIndex);
|
||||
};
|
||||
var retrieveValueFromItem = extSource.retrieveValueFromItem = function (dataItem, dimIndex) {
|
||||
if (dataItem == null) {
|
||||
return;
|
||||
}
|
||||
var dimDef = dimensions[dimIndex];
|
||||
// When `dimIndex` is `null`, `rawValueGetter` return the whole item.
|
||||
if (dimDef) {
|
||||
return rawValueGetter(dataItem, dimIndex, dimDef.name);
|
||||
}
|
||||
};
|
||||
extSource.getDimensionInfo = bind(getDimensionInfo, null, dimensions, dimsByName);
|
||||
extSource.cloneAllDimensionInfo = bind(cloneAllDimensionInfo, null, dimensions);
|
||||
return extSource;
|
||||
}
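/**
 * A minimal sketch of how a transform function might read its upstream through the
 * `ExternalSource` assembled above (it is passed as `params.upstream` by
 * `applySingleDataTransform` below). The transform type 'myns:double' and the
 * dimension name 'value' are hypothetical, for illustration only.
 * ```js
 * var doubleTransform = {
 *     type: 'myns:double',
 *     transform: function (params) {
 *         var upstream = params.upstream;
 *         var dimInfo = upstream.getDimensionInfo('value');
 *         var rows = upstream.cloneRawData();
 *         for (var i = 0; i < upstream.count(); i++) {
 *             var val = upstream.retrieveValue(i, dimInfo ? dimInfo.index : 1);
 *             // ... use `val` to build the output rows ...
 *         }
 *         return { data: rows };
 *     }
 * };
 * ```
 */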
function getRawData(upstream) {
  var sourceFormat = upstream.sourceFormat;
  if (!isSupportedSourceFormat(sourceFormat)) {
    var errMsg = '';
    if (process.env.NODE_ENV !== 'production') {
      errMsg = '`getRawData` is not supported in source format ' + sourceFormat;
    }
    throwError(errMsg);
  }
  return upstream.data;
}
function cloneRawData(upstream) {
  var sourceFormat = upstream.sourceFormat;
  var data = upstream.data;
  if (!isSupportedSourceFormat(sourceFormat)) {
    var errMsg = '';
    if (process.env.NODE_ENV !== 'production') {
      errMsg = '`cloneRawData` is not supported in source format ' + sourceFormat;
    }
    throwError(errMsg);
  }
  if (sourceFormat === SOURCE_FORMAT_ARRAY_ROWS) {
    var result = [];
    for (var i = 0, len = data.length; i < len; i++) {
      // Not a strict (deep) clone, for performance.
      result.push(data[i].slice());
    }
    return result;
  } else if (sourceFormat === SOURCE_FORMAT_OBJECT_ROWS) {
    var result = [];
    for (var i = 0, len = data.length; i < len; i++) {
      // Not a strict (deep) clone, for performance.
      result.push(extend({}, data[i]));
    }
    return result;
  }
}
function getDimensionInfo(dimensions, dimsByName, dim) {
  if (dim == null) {
    return;
  }
  // Keep the same logic as `List::getDimension` did.
  if (isNumber(dim)
  // If `dim` is a number-like string but is not defined as a dimension name.
  || !isNaN(dim) && !hasOwn(dimsByName, dim)) {
    return dimensions[dim];
  } else if (hasOwn(dimsByName, dim)) {
    return dimsByName[dim];
  }
}
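/**
 * For example, with hypothetical dimensions
 * `[{index: 0, name: 'product'}, {index: 1, name: '2012'}]`:
 * ```js
 * getDimensionInfo(dimensions, dimsByName, 0);         // number -> by index
 * getDimensionInfo(dimensions, dimsByName, '0');       // number-like string, not a declared name -> by index
 * getDimensionInfo(dimensions, dimsByName, 'product'); // declared name -> by name
 * getDimensionInfo(dimensions, dimsByName, '2012');    // number-like but a declared name -> by name
 * ```
 */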
function cloneAllDimensionInfo(dimensions) {
  return clone(dimensions);
}
var externalTransformMap = createHashMap();
export function registerExternalTransform(externalTransform) {
  externalTransform = clone(externalTransform);
  var type = externalTransform.type;
  var errMsg = '';
  if (!type) {
    if (process.env.NODE_ENV !== 'production') {
      errMsg = 'Must have a `type` when `registerTransform`.';
    }
    throwError(errMsg);
  }
  var typeParsed = type.split(':');
  if (typeParsed.length !== 2) {
    if (process.env.NODE_ENV !== 'production') {
      errMsg = 'Name must include namespace like "ns:regression".';
    }
    throwError(errMsg);
  }
  // The namespace 'echarts:xxx' is the official namespace; those transforms are
  // referenced directly via 'xxx' rather than 'echarts:xxx'.
  var isBuiltIn = false;
  if (typeParsed[0] === 'echarts') {
    type = typeParsed[1];
    isBuiltIn = true;
  }
  externalTransform.__isBuiltIn = isBuiltIn;
  externalTransformMap.set(type, externalTransform);
}
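/**
 * A minimal sketch of registering a user-defined transform under a namespaced type
 * (the namespace 'myns' and the pass-through logic are hypothetical). Built-in
 * transforms are registered as 'echarts:filter', 'echarts:sort', etc., and, because
 * of the branch above, are referenced simply as 'filter' / 'sort' in options.
 * ```js
 * registerExternalTransform({
 *     type: 'myns:identity',
 *     transform: function (params) {
 *         // Return one (or more) result sources; `data` is required.
 *         return { data: params.upstream.cloneRawData() };
 *     }
 * });
 * ```
 */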
export function applyDataTransform(rawTransOption, sourceList, infoForPrint) {
  var pipedTransOption = normalizeToArray(rawTransOption);
  var pipeLen = pipedTransOption.length;
  var errMsg = '';
  if (!pipeLen) {
    if (process.env.NODE_ENV !== 'production') {
      errMsg = 'If `transform` declared, it should at least contain one transform.';
    }
    throwError(errMsg);
  }
  for (var i = 0, len = pipeLen; i < len; i++) {
    var transOption = pipedTransOption[i];
    sourceList = applySingleDataTransform(transOption, sourceList, infoForPrint, pipeLen === 1 ? null : i);
    // A piped transform only supports a single input, except the first one.
    // A piped transform only supports a single output, except the last one.
    if (i !== len - 1) {
      sourceList.length = Math.max(sourceList.length, 1);
    }
  }
  return sourceList;
}
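/**
 * A minimal sketch of a piped `transform` option as it reaches `applyDataTransform`
 * (the `config` values and dimension names are hypothetical; 'filter' and 'sort' are
 * the built-in transforms mentioned above). Each step feeds its single output into
 * the next step; only the last step may produce multiple result sources.
 * ```js
 * var rawTransOption = [
 *     { type: 'filter', config: { dimension: 'year', gte: 2013 } },
 *     { type: 'sort', config: { dimension: 'score', order: 'desc' } }
 * ];
 * // applyDataTransform(rawTransOption, [upstreamSource], { datasetIndex: 1 });
 * // A single non-array transform option is accepted too, via `normalizeToArray`.
 * ```
 */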
function applySingleDataTransform(transOption, upSourceList, infoForPrint,
// If `pipeIndex` is null/undefined, it is not a piped transform.
pipeIndex) {
  var errMsg = '';
  if (!upSourceList.length) {
    if (process.env.NODE_ENV !== 'production') {
      errMsg = 'Must have at least one upstream dataset.';
    }
    throwError(errMsg);
  }
  if (!isObject(transOption)) {
    if (process.env.NODE_ENV !== 'production') {
      errMsg = 'transform declaration must be an object rather than ' + typeof transOption + '.';
    }
    throwError(errMsg);
  }
  var transType = transOption.type;
  var externalTransform = externalTransformMap.get(transType);
  if (!externalTransform) {
    if (process.env.NODE_ENV !== 'production') {
      errMsg = 'Can not find transform on type "' + transType + '".';
    }
    throwError(errMsg);
  }
  // Prepare sources.
  var extUpSourceList = map(upSourceList, function (upSource) {
    return createExternalSource(upSource, externalTransform);
  });
  var resultList = normalizeToArray(externalTransform.transform({
    upstream: extUpSourceList[0],
    upstreamList: extUpSourceList,
    config: clone(transOption.config)
  }));
  if (process.env.NODE_ENV !== 'production') {
    if (transOption.print) {
      var printStrArr = map(resultList, function (extSource) {
        var pipeIndexStr = pipeIndex != null ? ' === pipe index: ' + pipeIndex : '';
        return ['=== dataset index: ' + infoForPrint.datasetIndex + pipeIndexStr + ' ===', '- transform result data:', makePrintable(extSource.data), '- transform result dimensions:', makePrintable(extSource.dimensions)].join('\n');
      }).join('\n');
      log(printStrArr);
    }
  }
  return map(resultList, function (result, resultIndex) {
    var errMsg = '';
    if (!isObject(result)) {
      if (process.env.NODE_ENV !== 'production') {
        errMsg = 'A transform should not return empty results.';
      }
      throwError(errMsg);
    }
    if (!result.data) {
      if (process.env.NODE_ENV !== 'production') {
        errMsg = 'Transform result data should not be null or undefined.';
      }
      throwError(errMsg);
    }
    var sourceFormat = detectSourceFormat(result.data);
    if (!isSupportedSourceFormat(sourceFormat)) {
      if (process.env.NODE_ENV !== 'production') {
        errMsg = 'Transform result data should be array rows or object rows.';
      }
      throwError(errMsg);
    }
    var resultMetaRawOption;
    var firstUpSource = upSourceList[0];
    /**
     * Intuitively, the end users know the content of the original `dataset.source`,
     * calculating the transform result in their mind.
     * Suppose the original `dataset.source` is:
     * ```js
     * [
     *     ['product', '2012', '2013', '2014', '2015'],
     *     ['AAA', 41.1, 30.4, 65.1, 53.3],
     *     ['BBB', 86.5, 92.1, 85.7, 83.1],
     *     ['CCC', 24.1, 67.2, 79.5, 86.4]
     * ]
     * ```
     * The dimension info has to be detected from the source data.
     * Some of the transformers (like filter, sort) will follow the dimension info
     * of the upstream, while others use new dimensions (like aggregate).
     * A transformer can output a field `dimensions` to define its own output dimensions.
     * We also allow transformers to omit the output `dimensions` field and
     * inherit the upstream dimensions definition. It reduces the burden of handling
     * dimensions in transformers.
     *
     * See also [DIMENSION_INHERIT_RULE] in `sourceManager.ts`.
     */
    if (firstUpSource && resultIndex === 0
    // If the transformer returns `dimensions`, it means that the transformer has its own
    // dimensions definitions. We do not inherit anything from the upstream.
    && !result.dimensions) {
      var startIndex = firstUpSource.startIndex;
      // We copy the header of the upstream to the result, because:
      // (1) The returned data never contains header lines and thus cannot be used
      // for dimension detection. In this case we can not use the "detected dimensions" of
      // the upstream directly, because they might have been detected based on a different `seriesLayoutBy`.
      // (2) We should support the series reading the upstream source with `seriesLayoutBy: 'row'`.
      // So the originally detected header should be added to the result, otherwise it can not be read.
      if (startIndex) {
        result.data = firstUpSource.data.slice(0, startIndex).concat(result.data);
      }
      resultMetaRawOption = {
        seriesLayoutBy: SERIES_LAYOUT_BY_COLUMN,
        sourceHeader: startIndex,
        dimensions: firstUpSource.metaRawOption.dimensions
      };
    } else {
      resultMetaRawOption = {
        seriesLayoutBy: SERIES_LAYOUT_BY_COLUMN,
        sourceHeader: 0,
        dimensions: result.dimensions
      };
    }
    return createSource(result.data, resultMetaRawOption, null);
  });
}
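/**
 * A minimal sketch of the two result shapes handled above (the values are hypothetical):
 * ```js
 * // (a) No `dimensions` returned: the upstream header rows (if any) are prepended to
 * //     `result.data` and the upstream `metaRawOption.dimensions` is inherited.
 * return { data: [['AAA', 41.1], ['BBB', 86.5]] };
 *
 * // (b) `dimensions` returned: nothing is inherited and `sourceHeader` is set to 0.
 * return {
 *     dimensions: ['product', 'total'],
 *     data: [['AAA', 189.9], ['BBB', 347.4]]
 * };
 * ```
 */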
function isSupportedSourceFormat(sourceFormat) {
  return sourceFormat === SOURCE_FORMAT_ARRAY_ROWS || sourceFormat === SOURCE_FORMAT_OBJECT_ROWS;
}
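/**
 * For reference, only the two row-oriented formats pass this check, e.g.:
 * ```js
 * [['a', 1], ['b', 2]]             // array rows: supported
 * [{x: 'a', y: 1}, {x: 'b', y: 2}] // object rows: supported
 * {x: ['a', 'b'], y: [1, 2]}       // keyed columns: not supported in data transform
 * ```
 */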