Integrated softenv analysis into the workflow. Changed input and output command-line options in the analysis scripts to take multiple files at once. Moved test and template artifacts to an excluded folder.

antux18 2024-08-19 14:59:08 +02:00
parent 1e5dd77cd8
commit e943be4efe
7 changed files with 26 additions and 23 deletions

View File

@@ -122,10 +122,11 @@ def main():
     parser.add_argument(
         "-i", "--input",
         action = "append",
+        nargs = "+",
         help =
         """
         The CSV file used as input for the analysis function. Multiple files
-        can be specified by repeating this argument with different paths.
+        can be specified at once by separating them with a space.
         All the input files must be artifact hash logs generated by ECG.
         """,
         required = True
@@ -139,12 +140,13 @@ def main():
         required = True
     )
     args = parser.parse_args()
-    input_paths = args.input
+    inputs = args.input
     output_path = args.output

     # Parsing the input files:
     input_table = []
-    for path in input_paths:
-        input_file = open(path)
-        input_table += list(csv.reader(input_file))
-        input_file.close()
+    for i in inputs:
+        for path in i:
+            input_file = open(path)
+            input_table += list(csv.reader(input_file))
+            input_file.close()
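For context: keeping action = "append" while adding nargs = "+" makes argparse collect a list of lists, one inner list per -i occurrence, each holding that occurrence's space-separated paths. That is what the new nested loop iterates over. A minimal sketch of the behaviour, with hypothetical file names:

import argparse
import csv

parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", action = "append", nargs = "+", required = True)

# Hypothetical invocation: two files after one -i, plus a repeated -i.
args = parser.parse_args(["-i", "hashes1.csv", "hashes2.csv", "-i", "hashes3.csv"])
print(args.input)  # [['hashes1.csv', 'hashes2.csv'], ['hashes3.csv']]

# Flattening mirrors the loop in the diff above (the files are assumed to exist):
input_table = []
for i in args.input:        # one inner list per -i occurrence
    for path in i:          # one path per file given to that occurrence
        with open(path) as input_file:
            input_table += list(csv.reader(input_file))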

View File

@@ -55,10 +55,11 @@ def main():
     parser.add_argument(
         "-i", "--input",
         action = "append",
+        nargs = "+",
         help =
         """
         The CSV file used as input for the analysis function. Multiple files
-        can be specified by repeating this argument with different paths.
+        can be specified at once by separating them with a space.
         All the input files must be build status logs generated by ECG.
         """,
         required = True
@@ -72,12 +73,13 @@ def main():
         required = True
     )
     args = parser.parse_args()
-    input_paths = args.input
+    inputs = args.input
     output_path = args.output

     # Parsing the input files:
     input_table = []
-    for path in input_paths:
-        input_file = open(path)
-        input_table += list(csv.reader(input_file))
-        input_file.close()
+    for i in inputs:
+        for path in i:
+            input_file = open(path)
+            input_table += list(csv.reader(input_file))
+            input_file.close()

View File

@@ -160,10 +160,11 @@ def main():
     parser.add_argument(
         "-i", "--input",
         action = "append",
+        nargs = "+",
         help =
         """
         The CSV file used as input for the analysis function. Multiple files
-        can be specified by repeating this argument with different paths.
+        can be specified at once by separating them with a space.
         All the input files must be package lists generated by ECG.
         """,
         required = True
@@ -177,13 +178,14 @@ def main():
         required = True
     )
     args = parser.parse_args()
-    input_paths = args.input
+    inputs = args.input
     output_path = args.output
     analysis_type = args.analysis_type

     # Parsing the input files:
     input_table = []
-    for path in input_paths:
-        input_file = open(path)
-        input_table += list(csv.reader(input_file))
-        input_file.close()
+    for i in inputs:
+        for path in i:
+            input_file = open(path)
+            input_table += list(csv.reader(input_file))
+            input_file.close()
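In practice a single -i can now take several logs at once. A hypothetical invocation of softenv_analysis.py (the paths are illustrative; only the -t/-i/-o flags and the "sources-stats" analysis type come from the scripts and Snakefile in this commit):

python3 softenv_analysis.py -t sources-stats \
    -i pkgs/artifact_a/2024-08-19.csv pkgs/artifact_b/2024-08-19.csv \
    -o sources-stats.csv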

View File

@@ -1,2 +0,0 @@
-template, unknown_error, 0
-test, unknown_error, 0

View File

@@ -35,7 +35,6 @@ rule check_all:
     input:
         expand(f"{ARTIFACTS_FOLDER_JSON}/{{artifact}}.json", artifact=ARTIFACTS)
-

 rule check_artifact:
     input:
         "flake.nix",
@@ -99,7 +98,7 @@ rule all_analysis:

 rule softenv_analysis:
     input:
-        pkg = expand(f"-i {PREFIX}/pkgs/{{artifact}}/{{date}}.csv",
+        expand(f"{PREFIX}/pkgs/{{artifact}}/{{date}}.csv",
            artifact=ARTIFACTS,\
            date=DATE
        ),
@@ -109,6 +108,6 @@ rule softenv_analysis:
            date=DATE
        ),
     shell:
-        expand(f"python3 {ANALYSIS_DIR}/softenv_analysis.py -t {{analysis_type}} {{input.pkg}}",\
+        expand(f"python3 {ANALYSIS_DIR}/softenv_analysis.py -t {{analysis_type}} -i {{{{input}}}} -o {{{{output}}}}",\
            analysis_type = ["sources-stats", "pkgs-changes"]
        )
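The brace escaping in the new shell template resolves in stages: the f-string consumes one level (so {{analysis_type}} becomes {analysis_type} and {{{{input}}}} becomes {{input}}), expand() then fills analysis_type while unescaping the remaining doubled braces, and the surviving {input}/{output} placeholders are left for Snakemake's shell directive to substitute. A rough sketch of the first two stages, mimicking expand() with str.format and an assumed ANALYSIS_DIR value:

# Assumed value, only for illustration.
ANALYSIS_DIR = "workflow/analysis"

# Stage 1: the f-string keeps one brace level for later expansion.
template = f"python3 {ANALYSIS_DIR}/softenv_analysis.py -t {{analysis_type}} -i {{{{input}}}} -o {{{{output}}}}"
print(template)
# python3 workflow/analysis/softenv_analysis.py -t {analysis_type} -i {{input}} -o {{output}}

# Stage 2: format-style expansion over the analysis types (what expand() does here),
# which also turns {{input}}/{{output}} into {input}/{output}.
for analysis_type in ["sources-stats", "pkgs-changes"]:
    print(template.format(analysis_type=analysis_type))
# python3 workflow/analysis/softenv_analysis.py -t sources-stats -i {input} -o {output}
# python3 workflow/analysis/softenv_analysis.py -t pkgs-changes -i {input} -o {output}

Snakemake then fills {input} and {output} from the rule's own input and output sections when the command runs.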