
Commit

update d3.nest(...).rollup(...) to d3.rollups (#2)
yhoonkim committed Sep 28, 2020
1 parent 3c42fb4 commit 078da1d
Showing 8 changed files with 357 additions and 259 deletions.
89 changes: 50 additions & 39 deletions gemini.js

Large diffs are not rendered by default.

85 changes: 48 additions & 37 deletions gemini.web.js
@@ -11537,20 +11537,20 @@
   if (typeof groupby === "string") {
     groupby = [groupby];
   }
-  return d3.nest()
-    .key(d => groupby.map(f => d.datum[f]).join("@@_@@"))
-    .entries(data)
+  return d3.groups(data, d => groupby.map(f => d.datum[f]).join("@@_@@"))
     .map(group => {
+      const values = group[1];
       let datum = groupby.reduce((datum, f) => {
-        datum[f] = group.values[0].datum[f];
+        datum[f] = values[0].datum[f];
         return datum;
-      }, { count: group.values.length });
+      }, { count: values.length });
       return {
         datum: datum,
         mark: {role: "group", marktype: "group"},
-        items: [{items: group.values }]
+        items: [{items: values }]
       };
     });
+
 }
 function unpackData(data) {
   if (data[0].mark.marktype !== "group") {
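For reference, the change above is the recurring pattern in this commit: d3@5's d3.nest().key(...).entries(...) returned {key, values} objects, while d3@6's d3.groups returns [key, values] array pairs. A minimal sketch of the shape change, using hypothetical sample data (not part of this repository):

// Sketch only -- hypothetical sample data, not part of the commit.
import * as d3 from "d3"; // d3@6

const data = [
  { datum: { city: "A" } },
  { datum: { city: "B" } },
  { datum: { city: "A" } }
];

// d3@5: d3.nest().key(d => d.datum.city).entries(data)
//   -> [{ key: "A", values: [...] }, { key: "B", values: [...] }]

// d3@6: each group is a [key, values] pair; keys keep first-appearance order.
const groups = d3.groups(data, d => d.datum.city);
// -> [["A", [{...}, {...}]], ["B", [{...}]]]

for (const [key, values] of groups) {
  console.log(key, values.length); // "A" 2, then "B" 1
}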
@@ -16156,32 +16156,43 @@
     return (acc = acc && (val !== undefined ? !isNaN(Number(val)) : true));
   }, true);
   if (!staggering.by) {
-    grouped = d3.nest()
-      .key(d => {
-        const val = d.__staggering_id__;
-        return val === undefined ? "__empty__" : val;
-      })
-      .sortKeys(getOrderFn(true, staggering.order))
-      .entries(dataWithTiming);
+
+
+    const orderFn = getOrderFn(true, staggering.order);
+    grouped = d3.groups(dataWithTiming, d => {
+      const val = d.__staggering_id__;
+      return val === undefined ? "__empty__" : val;
+    });
+    if (typeof(orderFn) === "function") {
+      grouped.sort((a,b) => orderFn(a,b));
+    }
   } else if (typeof staggering.by === "string") {
-    grouped = d3.nest()
-      .key(d => {
-        const val = (d.initial || d.final)[staggering.by];
-        return val === undefined ? "__empty__" : val;
-      })
-      .sortKeys(getOrderFn(isNumber, staggering.order))
-      .entries(dataWithTiming);
+
+
+    grouped = d3.groups(dataWithTiming, d => {
+      const val = (d.initial || d.final)[staggering.by];
+      return val === undefined ? "__empty__" : val;
+    });
+
+    const orderFn = getOrderFn(isNumber, staggering.order);
+    if (typeof(orderFn) === "function") {
+      grouped.sort((a,b) => orderFn(a,b));
+    }
   } else if (staggering.by.initial || staggering.by.final) {
     const which = staggering.by.initial ? "initial" : "final";
-    grouped = d3.nest()
-      .key(d => {
-        const val = (which === "initial"
-          ? d.initial || d.final
-          : d.final || d.initial)[staggering.by[which]];
-        return val === undefined ? "__empty__" : val;
-      })
-      .sortKeys(getOrderFn(isNumber, staggering.order))
-      .entries(dataWithTiming);
+
+
+    grouped = d3.groups(dataWithTiming, d => {
+      const val = (which === "initial"
+        ? d.initial || d.final
+        : d.final || d.initial)[staggering.by[which]];
+      return val === undefined ? "__empty__" : val;
+    });
+
+    const orderFn = getOrderFn(isNumber, staggering.order);
+    if (typeof(orderFn) === "function") {
+      grouped.sort((a,b) => orderFn(a,b));
+    }
   }
 
   N = grouped.length;
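d3@6 has no counterpart to d3.nest's .sortKeys(), so the branches above group first and then sort the resulting pairs via getOrderFn. A minimal sketch of that idea, with hypothetical data and d3.ascending standing in for the comparator that getOrderFn is assumed to return:

// Sketch only -- stand-in data and comparator, not the commit's exact code.
import * as d3 from "d3";

const dataWithTiming = [
  { __staggering_id__: 2 },
  { __staggering_id__: 1 },
  { __staggering_id__: 1 }
];

// d3.groups keeps keys in first-appearance order...
let grouped = d3.groups(dataWithTiming, d => {
  const val = d.__staggering_id__;
  return val === undefined ? "__empty__" : val;
});

// ...so the v5 .sortKeys(comparator) step becomes an explicit sort of the pairs.
// The comparison here uses each pair's key (pair[0]); d3.ascending is an assumed
// stand-in for whatever getOrderFn returns.
grouped.sort(([keyA], [keyB]) => d3.ascending(keyA, keyB));
// -> [[1, [...2 items]], [2, [...1 item]]]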
Expand Down Expand Up @@ -16211,13 +16222,13 @@
});

timings.groups = grouped.map((g, i) => {
return staggeredTiming(subStaggering, g.values, durations[i]);
return staggeredTiming(subStaggering, g[1], durations[i]);
});

return getFlattenTimings(timings);
}
grouped.forEach((group, i) => {
group.values.forEach(datum => {
group[1].forEach(datum => {
datum.delay = delays[i];
datum.duration = durations[i];
});
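Because each entry of grouped is now a [key, values] pair, the values are read with g[1] / group[1] above; destructuring the pair is an equivalent way to write the same loop. A sketch with hypothetical stand-ins for grouped, delays, and durations:

// Sketch only -- grouped, delays, and durations are hypothetical stand-ins.
const grouped = [["a", [{}, {}]], ["b", [{}]]];
const delays = [0, 100];
const durations = [300, 300];

grouped.forEach(([key, values], i) => {  // destructure instead of indexing group[1]
  values.forEach(datum => {
    datum.delay = delays[i];
    datum.duration = durations[i];
  });
});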
@@ -22977,9 +22988,8 @@
   }
   let axesScales = vegaAxes.filter(a => a.grid).map(a => a.scale);
 
-  return d3.nest()
-    .key(axis => axis.scale)
-    .rollup(axes => {
+  return d3.rollups(vegaAxes,
+    axes => {
       let axisWithGrid = axes.find(a => a.grid);
       let axisWithoutGrid = { ...axes.find(a => !a.grid) };
 
@@ -22991,9 +23001,10 @@
         axisWithoutGrid.zindex = 0;
       }
       return axisWithoutGrid;
-    }).entries(vegaAxes)
-    .map(d => d.value)
-    .sort((a,b) => (axesScales.indexOf(a.scale) - axesScales.indexOf(b.scale)));
+    },
+    axis => axis.scale
+  ).map(d => d[1])
+    .sort((a,b) => (axesScales.indexOf(a.scale) - axesScales.indexOf(b.scale)));
 }
 
 const { animate } = Gemini;
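The same pair shape applies to the rollup change above: d3@5's d3.nest().key(...).rollup(...).entries(...) produced {key, value} objects, whereas d3@6's d3.rollups(data, reduce, key) takes the reducer before the key accessor and produces [key, value] pairs, read back with d[1]. A minimal sketch with hypothetical axis objects and a trivial reducer in place of the merge logic:

// Sketch only -- hypothetical axes and a trivial reducer, not the commit's merge logic.
import * as d3 from "d3";

const vegaAxes = [
  { scale: "x", grid: true },
  { scale: "x", grid: false },
  { scale: "y", grid: false }
];

// d3@5: d3.nest().key(a => a.scale).rollup(axes => axes.length).entries(vegaAxes)
//   -> [{ key: "x", value: 2 }, { key: "y", value: 1 }]

// d3@6: data first, then the reducer, then the key accessor; output is [key, value] pairs.
const counts = d3.rollups(vegaAxes, axes => axes.length, axis => axis.scale);
// -> [["x", 2], ["y", 1]]

const valuesOnly = counts.map(d => d[1]); // -> [2, 1]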
2 changes: 1 addition & 1 deletion gemini.web.js.map

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -50,7 +50,7 @@
   },
   "dependencies": {
     "ajv": "^6.12.0",
-    "d3": "5.14.2",
+    "d3": "^6.2",
     "d3-interpolate-path": "^2.1.1",
     "d3-selection-multi": "^1.0.1",
     "djv": "^2.1.3-alpha.0",
65 changes: 37 additions & 28 deletions src/actuator/staggering.js
@@ -1,4 +1,5 @@
 import * as d3 from "d3";
+import { group } from "d3";
 import { flatten } from "../util/util.js";
 import { getEaseFn } from "./util";
 
@@ -37,35 +38,43 @@ function staggeredTiming(staggering, data, duration) {
     return (acc = acc && (val !== undefined ? !isNaN(Number(val)) : true));
   }, true);
   if (!staggering.by) {
-    grouped = d3
-      .nest()
-      .key(d => {
-        const val = d.__staggering_id__;
-        return val === undefined ? "__empty__" : val;
-      })
-      .sortKeys(getOrderFn(true, staggering.order))
-      .entries(dataWithTiming);
+
+
+    const orderFn = getOrderFn(true, staggering.order);
+    grouped = d3.groups(dataWithTiming, d => {
+      const val = d.__staggering_id__;
+      return val === undefined ? "__empty__" : val;
+    })
+    if (typeof(orderFn) === "function") {
+      grouped.sort((a,b) => orderFn(a,b));
+    }
   } else if (typeof staggering.by === "string") {
-    grouped = d3
-      .nest()
-      .key(d => {
-        const val = (d.initial || d.final)[staggering.by];
-        return val === undefined ? "__empty__" : val;
-      })
-      .sortKeys(getOrderFn(isNumber, staggering.order))
-      .entries(dataWithTiming);
+
+
+    grouped = d3.groups(dataWithTiming, d => {
+      const val = (d.initial || d.final)[staggering.by];
+      return val === undefined ? "__empty__" : val;
+    })
+
+    const orderFn = getOrderFn(isNumber, staggering.order);
+    if (typeof(orderFn) === "function") {
+      grouped.sort((a,b) => orderFn(a,b));
+    }
   } else if (staggering.by.initial || staggering.by.final) {
     const which = staggering.by.initial ? "initial" : "final";
-    grouped = d3
-      .nest()
-      .key(d => {
-        const val = (which === "initial"
-          ? d.initial || d.final
-          : d.final || d.initial)[staggering.by[which]];
-        return val === undefined ? "__empty__" : val;
-      })
-      .sortKeys(getOrderFn(isNumber, staggering.order))
-      .entries(dataWithTiming);
+
+
+    grouped = d3.groups(dataWithTiming, d => {
+      const val = (which === "initial"
+        ? d.initial || d.final
+        : d.final || d.initial)[staggering.by[which]];
+      return val === undefined ? "__empty__" : val;
+    })
+
+    const orderFn = getOrderFn(isNumber, staggering.order);
+    if (typeof(orderFn) === "function") {
+      grouped.sort((a,b) => orderFn(a,b));
+    }
   }
 
   N = grouped.length;
@@ -95,13 +104,13 @@ function staggeredTiming(staggering, data, duration) {
     });
 
     timings.groups = grouped.map((g, i) => {
-      return staggeredTiming(subStaggering, g.values, durations[i]);
+      return staggeredTiming(subStaggering, g[1], durations[i]);
    });
 
     return getFlattenTimings(timings);
   }
   grouped.forEach((group, i) => {
-    group.values.forEach(datum => {
+    group[1].forEach(datum => {
      datum.delay = delays[i];
      datum.duration = durations[i];
    });
13 changes: 6 additions & 7 deletions src/util/vgDataHelper.js
@@ -16,21 +16,20 @@ function facetData(data, facetDef) {
   if (typeof groupby === "string") {
     groupby = [groupby];
   }
-  return d3
-    .nest()
-    .key(d => groupby.map(f => d.datum[f]).join("@@_@@"))
-    .entries(data)
+  return d3.groups(data, d => groupby.map(f => d.datum[f]).join("@@_@@"))
     .map(group => {
+      const values = group[1]
       let datum = groupby.reduce((datum, f) => {
-        datum[f] = group.values[0].datum[f];
+        datum[f] = values[0].datum[f];
         return datum;
-      }, { count: group.values.length });
+      }, { count: values.length });
       return {
         datum: datum,
         mark: {role: "group", marktype: "group"},
-        items: [{items: group.values }]
+        items: [{items: values }]
       };
     });
+
 }
 function unpackData(data) {
   if (data[0].mark.marktype !== "group") {
12 changes: 6 additions & 6 deletions src/util/vl2vg4gemini.js
@@ -37,9 +37,8 @@ function mergeDuplicatedAxes(vegaAxes) {
   }
   let axesScales = vegaAxes.filter(a => a.grid).map(a => a.scale);
 
-  return d3.nest()
-    .key(axis => axis.scale)
-    .rollup(axes => {
+  return d3.rollups(vegaAxes,
+    axes => {
       let axisWithGrid = axes.find(a => a.grid);
       let axisWithoutGrid = { ...axes.find(a => !a.grid) };
 
@@ -51,8 +50,9 @@ function mergeDuplicatedAxes(vegaAxes) {
         axisWithoutGrid.zindex = 0;
       }
       return axisWithoutGrid;
-    }).entries(vegaAxes)
-    .map(d => d.value)
-    .sort((a,b) => (axesScales.indexOf(a.scale) - axesScales.indexOf(b.scale)));
+    },
+    axis => axis.scale
+  ).map(d => d[1])
+    .sort((a,b) => (axesScales.indexOf(a.scale) - axesScales.indexOf(b.scale)));
 }


