#!/usr/bin/env python3

"""
This script performs a build status analysis on the outputs of the workflow
to generate tables that can then be plotted by another program.
"""
|
|
|
|
|
|
|
|
import argparse
import csv
import os
import datetime


def analysis(input_table):
    """
    Analyzes the given build status table to count the results of the building
    of the Dockerfile for each category.

    Parameters
    ----------
    input_table: list
        Table to analyse, as a list of rows (each row is a list of columns).

    Returns
    -------
    dict
        Output table of the analysis in the form of a dict with headers as keys.
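
    Examples
    --------
    The status labels below are only illustrative; the real values are whatever
    appears in the third column of the ECG build status logs.

    >>> analysis([["repo_a", "", "success"],
    ...           ["repo_b", "", "build_error"],
    ...           ["repo_c", "", "success"]])
    {'success': 2, 'build_error': 1}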
    """
    buildstatus = {}
    for row in input_table:
        # Third column is the result:
        if row[2] not in buildstatus:
            buildstatus[row[2]] = 1
        else:
            buildstatus[row[2]] += 1
    return buildstatus


def main():
    # Command line arguments parsing:
    parser = argparse.ArgumentParser(
        prog = "buildstatus_analysis",
        description =
        """
        This script performs a build status analysis on the outputs of the
        workflow to generate tables that can then be plotted by another program.
        The generated table gives the number of images that were built
        successfully, and the number of images that failed to build,
        for each category of error.
        """
    )
    parser.add_argument(
        "-v", "--verbose",
        action = "store_true",
        help = "Shows more details on what is being done."
    )
    parser.add_argument(
        "-i", "--input",
        action = "append",
        help =
        """
        The CSV file used as input for the analysis function. Multiple files
        can be specified by repeating this argument with different paths.
        All the input files must be build status logs generated by ECG.
        """,
        required = True
    )
    parser.add_argument(
        "-o", "--output",
        help =
        """
        Path to the output CSV file that will be created by the analysis function.
        """,
        required = True
    )
    args = parser.parse_args()
    input_paths = args.input
    output_path = args.output
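
    # Example invocation (the file names here are only illustrative):
    #   ./buildstatus_analysis.py -i buildstatus_run1.csv -i buildstatus_run2.csv -o buildstatus_analysis.csv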

    # Parsing the input files:
    input_table = []
    for path in input_paths:
        with open(path) as input_file:
            input_table += list(csv.reader(input_file))
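
    # input_table now holds the rows of every input file concatenated; each row is a
    # list of column strings, and analysis() reads the build result from the third column.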

    # Analyzing the inputs:
    output_dict = analysis(input_table)

    # Adding the current time to the output row:
    now = datetime.datetime.now()
    timestamp = str(datetime.datetime.timestamp(now))
    output_dict["timestamp"] = timestamp

    # Writing analysis to output file:
    with open(output_path, "w+", newline="") as output_file:
        dict_writer = csv.DictWriter(output_file, fieldnames=output_dict.keys())
        dict_writer.writeheader()
        dict_writer.writerow(output_dict)


if __name__ == "__main__":
    main()