Skip to content

Commit 1a0ae04

Browse files
committed
Revert "use time_nds and convert to ms"
This reverts commit ca77568.
1 parent ca77568 commit 1a0ae04

File tree

1 file changed

+26
-26
lines changed

1 file changed

+26
-26
lines changed

nds/nds_power.py

Lines changed: 26 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -415,7 +415,7 @@ def run_query_stream(input_prefix,
415415
"""
416416
queries_reports = []
417417
execution_time_list = []
418-
total_time_start_ms = time.time_ns() // 1_000_000
418+
total_time_start = time.time_ns()
419419
# check if it's running specific query or Power Run
420420
if len(query_dict) == 1:
421421
app_name = "NDS - " + list(query_dict.keys())[0]
@@ -457,9 +457,9 @@ def run_query_stream(input_prefix,
457457
profiler = Profiler(profiling_hook=profiling_hook, output_root=json_summary_folder)
458458

459459
# Run query
460-
power_start_ms = time.time_ns() // 1_000_000
461-
setup_time_ms = 0
462-
cleanup_time_ms = 0
460+
power_start = time.time_ns()
461+
setup_time = 0
462+
cleanup_time = 0
463463

464464
for query_name, q_content in query_dict.items():
465465
# show query name in Spark web UI
@@ -488,9 +488,9 @@ def run_query_stream(input_prefix,
488488

489489
# Accumulate setup and cleanup times
490490
if query_type == 'setup':
491-
setup_time_ms += query_time
491+
setup_time += query_time
492492
elif query_type == 'cleanup':
493-
cleanup_time_ms += query_time
493+
cleanup_time += query_time
494494

495495
queries_reports.append(q_report)
496496
if json_summary_folder:
@@ -505,36 +505,36 @@ def run_query_stream(input_prefix,
505505
else:
506506
summary_prefix = os.path.join(json_summary_folder, '')
507507
q_report.write_summary(prefix=summary_prefix)
508-
power_end_ms = time.time_ns() // 1_000_000
509-
power_elapse_ms = power_end_ms - power_start_ms
508+
power_end = time.time_ns()
509+
power_elapse = int((power_end - power_start)/1000)
510510

511511
# Calculate Power Test Time (excluding setup and cleanup)
512-
power_test_time_ms = power_elapse_ms - setup_time_ms - cleanup_time_ms
513-
512+
power_test_time = power_elapse - setup_time - cleanup_time
513+
514514
if not keep_sc:
515515
spark_session.sparkContext.stop()
516-
total_time_end_ms = time.time_ns() // 1_000_000
517-
total_elapse_ms = total_time_end_ms - total_time_start_ms
518-
print("====== Power Test Time: {} milliseconds ======".format(power_test_time_ms))
519-
if setup_time_ms > 0:
520-
print("====== Power Setup Time: {} milliseconds ======".format(setup_time_ms))
521-
if cleanup_time_ms > 0:
522-
print("====== Power Cleanup Time: {} milliseconds ======".format(cleanup_time_ms))
523-
print("====== Total Time: {} milliseconds ======".format(total_elapse_ms))
516+
total_time_end = time.time_ns()
517+
total_elapse = int((total_time_end - total_time_start)/1000)
518+
print("====== Power Test Time: {} milliseconds ======".format(power_test_time))
519+
if setup_time > 0:
520+
print("====== Power Setup Time: {} milliseconds ======".format(setup_time))
521+
if cleanup_time > 0:
522+
print("====== Power Cleanup Time: {} milliseconds ======".format(cleanup_time))
523+
print("====== Total Time: {} milliseconds ======".format(total_elapse))
524524
execution_time_list.append(
525-
(spark_app_id, "Power Start Time", power_start_ms))
525+
(spark_app_id, "Power Start Time", power_start))
526526
execution_time_list.append(
527-
(spark_app_id, "Power End Time", power_end_ms))
527+
(spark_app_id, "Power End Time", power_end))
528528
execution_time_list.append(
529-
(spark_app_id, "Power Test Time", power_test_time_ms))
530-
if setup_time_ms > 0:
529+
(spark_app_id, "Power Test Time", power_test_time))
530+
if setup_time > 0:
531531
execution_time_list.append(
532-
(spark_app_id, "Power Setup Time", setup_time_ms))
533-
if cleanup_time_ms > 0:
532+
(spark_app_id, "Power Setup Time", setup_time))
533+
if cleanup_time > 0:
534534
execution_time_list.append(
535-
(spark_app_id, "Power Cleanup Time", cleanup_time_ms))
535+
(spark_app_id, "Power Cleanup Time", cleanup_time))
536536
execution_time_list.append(
537-
(spark_app_id, "Total Time", total_elapse_ms))
537+
(spark_app_id, "Total Time", total_elapse))
538538

539539
header = ["application_id", "query", "time/milliseconds"]
540540
# print to driver stdout for quick view

0 commit comments

Comments (0)