
Commit d16b76d

add generate metrics, and capitalize node names and metric names
1 parent: 267f46c

3 files changed (+47 additions, -2 deletions)

spark-ui/src/components/SqlFlow/StageNode.tsx

Lines changed: 4 additions & 1 deletion

@@ -13,6 +13,7 @@ import { getSizeFromMetrics } from '../../reducers/PlanGraphUtils';
 import { truncateMiddle } from "../../reducers/PlanParsers/PlanParserUtils";
 import {
   calculatePercentage,
+  capitalizeWords,
   humanFileSize,
   humanizeTimeDiff,
   parseBytesString,
@@ -678,6 +679,8 @@ export const StageNode: FC<{
     });
   }
 
+  const dataTableWithCapitalizedNames = dataTable.map(metric => ({ ...metric, name: capitalizeWords(metric.name) }))
+
   return (
     <>
       <Handle type="target" position={Position.Left} id="b" />
@@ -695,7 +698,7 @@
       >
         {data.node.enrichedName}
       </Typography>
-      {dataTable.map((metric) => (
+      {dataTableWithCapitalizedNames.map((metric) => (
        <ConditionalWrapper
          key={metric.name}
          condition={metric.tooltip !== undefined}

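For reference, here is a minimal, self-contained sketch of the mapping introduced in StageNode.tsx, using a simplified metric shape and a local copy of the capitalizeWords helper this commit adds to FormatUtils.ts. The metric shape and sample values are illustrative, not the component's real types.

// Simplified metric shape for illustration; the component's real rows carry more fields.
interface MetricRow {
  name: string;
  value: string;
  tooltip?: string;
}

// Local copy of the helper added to FormatUtils.ts in this commit.
function capitalizeWords(text: string): string {
  return text
    .split(" ")
    .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
    .join(" ");
}

// Same transformation as in the component: only the displayed name changes.
const dataTable: MetricRow[] = [
  { name: "rows output", value: "1,204" },
  { name: "spill size", value: "0 B" },
];

const dataTableWithCapitalizedNames = dataTable.map((metric) => ({
  ...metric,
  name: capitalizeWords(metric.name),
}));

// dataTableWithCapitalizedNames:
// [{ name: "Rows Output", value: "1,204" }, { name: "Spill Size", value: "0 B" }]
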
spark-ui/src/reducers/SqlReducer.ts

Lines changed: 36 additions & 1 deletion

@@ -18,6 +18,7 @@ import { SparkSQL, SparkSQLs, SqlStatus } from "../interfaces/SparkSQLs";
 import { NodesMetrics } from "../interfaces/SqlMetrics";
 import {
   calculatePercentage,
+  capitalizeWords,
   timeStrToEpocTime,
   timeStringToMilliseconds,
 } from "../utils/FormatUtils";
@@ -222,7 +223,7 @@
       rddScopeId: nodePlan?.rddScopeId,
       type: type,
       parsedPlan: parsedPlan,
-      enrichedName: nodeEnrichedNameBuilder(node.nodeName, parsedPlan),
+      enrichedName: capitalizeWords(nodeEnrichedNameBuilder(node.nodeName, parsedPlan)),
       isCodegenNode: isCodegenNode,
       wholeStageCodegenId: isCodegenNode
         ? extractCodegenId()
@@ -702,6 +703,36 @@ function updateParsedPlan(
   return node.parsedPlan;
 }
 
+function addGenerateMetrics(
+  node: EnrichedSqlNode,
+  updatedMetrics: EnrichedSqlMetric[],
+  graph: Graph,
+  allNodes: EnrichedSqlNode[],
+): EnrichedSqlMetric | null {
+  if (node.nodeName === "Generate") {
+    const inputNode = findLastNodeWithInputRows(node, graph, allNodes);
+    if (!inputNode) {
+      return null;
+    }
+
+    const inputRows = getRowsFromMetrics(inputNode.metrics);
+    if (inputRows === null || inputRows === 0) {
+      return null;
+    }
+
+    const outputRows = getRowsFromMetrics(updatedMetrics);
+    if (outputRows === null) {
+      return null;
+    }
+
+    const ratio = outputRows / inputRows;
+    const ratioFormatted = ratio.toFixed(2);
+
+    return { name: `${node.enrichedName} Ratio`, value: `${ratioFormatted}X` };
+  }
+  return null;
+}
+
 function updateNodeMetrics(
   node: EnrichedSqlNode,
   metrics: EnrichedSqlMetric[],
@@ -712,6 +743,7 @@
   const filterRatio = addFilterRatioMetric(node, updatedOriginalMetrics, graph, allNodes);
   const crossJoinFilterRatio = addCrossJoinFilterRatioMetric(node, updatedOriginalMetrics, graph, allNodes);
   const joinMetrics = addJoinMetrics(node, updatedOriginalMetrics, graph, allNodes);
+  const generateMetrics = addGenerateMetrics(node, updatedOriginalMetrics, graph, allNodes);
   return [
     ...updatedOriginalMetrics,
     ...(filterRatio !== null
@@ -723,6 +755,9 @@
     ...(joinMetrics !== null
       ? joinMetrics
       : []),
+    ...(generateMetrics !== null
+      ? [generateMetrics]
+      : []),
   ];
 }
 

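For intuition about the new addGenerateMetrics logic, here is a hedged sketch of the ratio computation in isolation. It assumes the input and output row counts have already been resolved (in the commit that is done via findLastNodeWithInputRows and getRowsFromMetrics, which are not shown here); the helper name and sample values below are illustrative only, not part of the repo.

// Illustrative helper (not from the repo): builds the same "<name> Ratio" metric
// that addGenerateMetrics emits, once the row counts are known.
function generateRatioMetric(
  enrichedName: string,
  inputRows: number | null,
  outputRows: number | null,
): { name: string; value: string } | null {
  // Mirrors the guards in the commit: no metric without usable row counts.
  if (inputRows === null || inputRows === 0 || outputRows === null) {
    return null;
  }
  const ratioFormatted = (outputRows / inputRows).toFixed(2);
  return { name: `${enrichedName} Ratio`, value: `${ratioFormatted}X` };
}

// A Generate node (e.g. an explode) that fans 1,000 input rows out to 4,500 rows:
// generateRatioMetric("Generate Explode", 1000, 4500)
//   -> { name: "Generate Explode Ratio", value: "4.50X" }
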
spark-ui/src/utils/FormatUtils.ts

Lines changed: 7 additions & 0 deletions

@@ -11,6 +11,13 @@ export function humanFileSize(bytes: number): string {
     .replace("TB", "TiB");
 }
 
+export function capitalizeWords(text: string): string {
+  return text
+    .split(' ')
+    .map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
+    .join(' ');
+}
+
 export function parseBytesString(str: string): number {
   return parse(
     str

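A quick usage sketch of the new helper: it title-cases each space-separated word and lowercases the rest of that word. The import path matches how the reducer imports it in this commit (i.e. from a file under src/reducers); the example strings are illustrative.

import { capitalizeWords } from "../utils/FormatUtils";

console.log(capitalizeWords("number of output rows")); // "Number Of Output Rows"
console.log(capitalizeWords("shuffle bytes WRITTEN")); // "Shuffle Bytes Written"
console.log(capitalizeWords("Generate"));              // "Generate"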