Mirror of https://framagit.org/enfance-libre/statistiques (synced 2025-12-07 12:33:46 +00:00)
feat: sankey diagram WIP
Mermaid does not support circular links, which makes it unusable for the moment.
This commit is contained in:
parent 4a4311e829
commit 63482979fe
5 changed files with 81 additions and 12 deletions
.gitignore (vendored)
@@ -16,4 +16,6 @@
 dist
 test-coverage
 node_modules
+# output stats files
 el-stats*.json
+el-stats*.txt

@@ -13,6 +13,8 @@ import { statsGeneralesDesc } from "./statistiques/v2/generales/StatsGenerales";
 import { computeStatsSociales } from "./statistiques/v2/sociales/computeStatsSociales";
 import { statsSocialesDesc } from "./statistiques/v2/sociales/StatsSociales";
 import { checkDbSchemas } from "./notion/fetch/schemaCheck/checkDbSchemas";
+import { computeSequencEvtPenalSankeyData } from "./statistiques/v2/penales/computeSankeyData";
+import { sankeyDataToMermaidDiagram } from "./statistiques/v2/sankey/sankeyDataToMermaidDiagram";

 type ProcessOptions = {
   dryRun: boolean;
@@ -82,6 +84,11 @@ function buildProcessOptions(): ProcessOptions {
   const statsPenales = computeStatsPenales(familles);
   const statsSociales = computeStatsSociales(familles);

+  console.log("Sankey Diagram:");
+  writeFileSync(
+    "./el-stats-penal-sankey-diagram.txt",
+    sankeyDataToMermaidDiagram(computeSequencEvtPenalSankeyData(familles))
+  );
   if (options.dryRun) {
     console.log(
       "Dry run => Skip Publishing. Stats are dumped in file el-stats-xxx.json"

@@ -1,3 +1,4 @@
+import { EvenementFamille } from "../../../data/EvenementFamille";
 import { Famille } from "../../../data/Famille";
 import { SankeyData } from "../sankey/SankeyData";
 export function computeSequencEvtPenalSankeyData(familles: Famille[]) {
@@ -5,15 +6,35 @@ export function computeSequencEvtPenalSankeyData(familles: Famille[]) {

   familles.forEach((f) => {
     // Compute all transitions, Events are already sorted
-    for (let index = 0; index < f.Evenements.length - 1; index++) {
-      const fromEvt = f.Evenements[index];
-      const toEvt = f.Evenements[index + 1];
-      const fromTransitions = sankeyData[fromEvt.Type] || {};
-      sankeyData[fromEvt.Type] = {
-        ...fromTransitions,
-        [toEvt.Type]: (fromTransitions[toEvt.Type] || 0) + 1,
+    const nonNullEvents = f.Evenements.filter(
+      (evt) => evt.Type !== null && (evt.Type as string) !== "null"
+    );
+
+    // remove duplicate types to avoid circular links that are not supported by mermaid
+    const evenements = removeDuplicateTypes(nonNullEvents);
+
+    for (let index = 0; index < evenements.length - 1; index++) {
+      const sourceEvt = evenements[index];
+      const targetEvt = evenements[index + 1];
+      const sourceSankeyData = sankeyData[sourceEvt.Type] || {};
+      sankeyData[sourceEvt.Type] = {
+        ...sourceSankeyData,
+        [targetEvt.Type]: (sourceSankeyData[targetEvt.Type] || 0) + 1,
       };
     }
   });
   return sankeyData;
 }
+
+function removeDuplicateTypes(events: EvenementFamille[]): EvenementFamille[] {
+  const seenTypes: { [key: string]: boolean } = {};
+  return events.filter((evt) => {
+    const type = evt.Type as string;
+    if (seenTypes[type]) {
+      return false;
+    } else {
+      seenTypes[type] = true;
+      return true;
+    }
+  });
+}
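
The deduplication step is what works around the Mermaid limitation mentioned in the commit message: when a family's event sequence revisits an earlier type, the pairwise transitions would contain a back-link, i.e. a circular link. A minimal sketch of that effect, not part of the commit, using invented event types and a stripped-down event shape (the real EvenementFamille type lives in ../../../data and may carry more fields):

// Illustration only. Minimal event shape assumed for the sketch.
type EvtSketch = { Type: string | null };

function dedupByType(events: EvtSketch[]): EvtSketch[] {
  // same idea as removeDuplicateTypes above: keep the first event of each type
  const seen: { [key: string]: boolean } = {};
  return events.filter((evt) => {
    const type = evt.Type as string;
    if (seen[type]) return false;
    seen[type] = true;
    return true;
  });
}

// Invented sequence that revisits a type: Controle -> Jugement -> Controle.
// Pairwise transitions on the raw list would include Jugement -> Controle,
// a circular link that Mermaid's sankey diagram cannot render (per the commit
// message). After deduplication only Controle -> Jugement remains.
const sequence: EvtSketch[] = [
  { Type: "Controle" },
  { Type: "Jugement" },
  { Type: "Controle" },
];
console.log(dedupByType(sequence).map((e) => e.Type)); // [ "Controle", "Jugement" ]

The trade-off is that repeat occurrences of a type are dropped entirely, so only a family's first visit to a given event type shows up in the diagram, which seems to be the accepted compromise while Mermaid lacks circular-link support.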

@@ -2,7 +2,7 @@
  * Sankey diagram transitions
  */
 export type SankeyData = {
-  [fromKey: string]: {
-    [toKey: string]: number;
+  [source: string]: {
+    [target: string]: number;
   };
 };
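
The type reads as a nested map of transition counts: the outer key is the source event type, the inner key the target, and the value the number of transitions observed. A hand-written value, not part of the commit, with invented event types (the import path assumes the sketch sits in the same sankey/ folder as SankeyData.ts, like the diagram module below does):

// Illustration only.
import { SankeyData } from "./SankeyData";

// Invented event types: 12 families went from Controle to Mise en demeure,
// and 4 of those went on to Jugement.
const example: SankeyData = {
  Controle: { "Mise en demeure": 12 },
  "Mise en demeure": { Jugement: 4 },
};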

@@ -1,5 +1,44 @@
 import { SankeyData } from "./SankeyData";

-export function sankeyDataToMermaidDiagram(sankeyData: SankeyData) {
-}
+export type SankeyDiagramOptions = { showValues: boolean };
+export function sankeyDataToMermaidDiagram(
+  sankeyData: SankeyData,
+  options: SankeyDiagramOptions = { showValues: true }
+) {
+  const header = `---
+config:
+  sankey:
+    showValues: ${options.showValues}
+---
+sankey-beta
+`;
+  return header + arrayToCsv(dataLines(sankeyData));
+}
+
+function dataLines(sankeyData: SankeyData): Array<[string, string, number]> {
+  return Object.entries(sankeyData).flatMap(([source, targets]) => {
+    return Object.entries(targets).map<[string, string, number]>(
+      ([target, count]) => {
+        return [source, target, count];
+      }
+    );
+  });
+}
+
+function arrayToCsv(data: Array<Array<string | number>>): string {
+  return data.map((dataLine) => dataLine.map(toCsvValue).join(",")).join("\n");
+}
+
+function toCsvValue(value: string | number): string {
+  if (typeof value === "number") {
+    return value.toString();
+  } else {
+    // Mermaid does not seem to support diacritics
+    const withoutDiacritics = value
+      .normalize("NFD")
+      .replace(/[\u0300-\u036f]/g, "");
+
+    const escaped = withoutDiacritics.replaceAll('"', '""');
+    return `"${escaped}"`;
+  }
+}
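
A usage sketch, not part of the commit, showing what the generator emits for a small hand-written transition map. The import path for the generator is the one used in the entry-point hunk above; the event type names are invented, and "Départ" carries an accent on purpose to show the diacritic stripping and quoting done by toCsvValue:

// Illustration only.
import { sankeyDataToMermaidDiagram } from "./statistiques/v2/sankey/sankeyDataToMermaidDiagram";

// Invented event types and counts.
const sample = {
  Controle: { Jugement: 3, "Départ": 1 },
  Jugement: { Relaxe: 2 },
};

console.log(sankeyDataToMermaidDiagram(sample));
// ---
// config:
//   sankey:
//     showValues: true
// ---
// sankey-beta
// "Controle","Jugement",3
// "Controle","Depart",1
// "Jugement","Relaxe",2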