Compare commits
2 Commits
db4e4f3baf ... d5bb59468c

Author | SHA1 | Date
---|---|---
 | d5bb59468c |
 | 95fa82edaa |
@@ -76,8 +76,11 @@ def buildresult_saver(result, buildstatus_path, config_path):
 def build_image(path, dockerfile_path, image_name, build_args):
     logging.info(f"Starting building image {image_name}")
-    build_args_str = " ".join(map(lambda x: f"--build-arg {x}", build_args))
-    build_command = f"docker build --no-cache -t {image_name} {build_args_str} -f {dockerfile_path} ."
+    build_command = f"docker build --no-cache -t {image_name} -f {dockerfile_path}"
+    if len(build_args) != 0:
+        build_args_str = " ".join(map(lambda x: f"--build-arg {x}", build_args))
+        build_command += build_args_str
+    build_command += " ."
     build_process = subprocess.run(build_command.split(" "), cwd=path, capture_output=True)
     build_output = f"stdout:\n{build_process.stdout.decode('utf-8')}\nstderr:\n{build_process.stderr.decode('utf-8')}"
     logging.info(f"Output of '{build_command}':\n\n{build_output}")
 
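For illustration, a minimal sketch of what the new assembly produces for a hypothetical invocation (the image name, Dockerfile path, and build args below are made up). Note the sketch inserts a separating space before `build_args_str`, which the join itself does not provide:

```python
# Sketch of the new build_command assembly, with hypothetical inputs.
image_name = "example:latest"    # hypothetical
dockerfile_path = "Dockerfile"   # hypothetical
build_args = ["FOO=1", "BAR=2"]  # hypothetical

build_command = f"docker build --no-cache -t {image_name} -f {dockerfile_path}"
if len(build_args) != 0:
    build_args_str = " ".join(map(lambda x: f"--build-arg {x}", build_args))
    build_command += " " + build_args_str  # leading space keeps tokens separate
build_command += " ."
print(build_command)
# docker build --no-cache -t example:latest -f Dockerfile --build-arg FOO=1 --build-arg BAR=2 .
```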
@@ -144,7 +147,7 @@ def check_env(config, src_dir, artifact_name, pkglist_path):
     }
     # Command to obtain the latest commit hash in a git repository (separated
     # into 2 parts for "--entrypoint"):
-    gitcmd = ("git", "log -n 1 --pretty=format:%H")
+    gitcmd = ("git", "-c safe.directory=* log -n 1 --pretty=format:%H")
 
     logging.info("Checking software environment")
     pkglist_file = open(pkglist_path, "w")
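The added `-c safe.directory=*` is meant to sidestep git's "detected dubious ownership" refusal, which triggers when the checkout is owned by a different user than the one running git (common inside containers). A minimal sketch of how the two-part tuple flattens into an ordinary argv list, assuming it is run directly rather than through docker:

```python
import subprocess

# The binary is kept separate so it can be handed to docker's "--entrypoint";
# flattened, the pair is a normal argv list.
gitcmd = ("git", "-c safe.directory=* log -n 1 --pretty=format:%H")
argv = [gitcmd[0], *gitcmd[1].split(" ")]
# ['git', '-c', 'safe.directory=*', 'log', '-n', '1', '--pretty=format:%H']

result = subprocess.run(argv, capture_output=True)
print(result.stdout.decode("utf-8"))  # latest commit hash of the repo in CWD
```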
workflow/analysis.smk (new file, 34 lines)

@@ -0,0 +1,34 @@
+configfile: "config/config.yaml"
+PREFIX = config["prefix"]
+
+include: "utils.smk"
+ARTIFACTS_FOLDER_NICKEL = config["folder_artifacts_nickel"]
+
+def get_dates(conference):
+    with open(f"dates/{conference}.txt", "r") as f:
+        return [d.strip() for d in f.readlines() if len(d.strip()) > 0]
+
+rule all:
+    input:
+        expand(f"{PREFIX}/aggregated/{{conference}}/{{type}}.csv", type=["pkgs", "artifact_hash", "build_status"], conference=config["conference"])
+
+
+rule aggregate_per_artifact:
+    input:
+        lambda w: expand(f"{PREFIX}/{{{{conference}}}}/{{{{type}}}}/{{{{artifact}}}}/{{date}}.csv", date = get_dates(w["conference"]))
+    output:
+        f"{PREFIX}/aggregated/{{conference}}/{{type}}/{{artifact}}.csv"
+    shell:
+        "cat {input} > {output}"
+
+rule aggregate_all:
+    input:
+        lambda w: expand(f"{PREFIX}/{{{{conference}}}}/{{{{type}}}}/{{artifact}}/{{date}}.csv", date = get_dates(w["conference"]), artifact=get_artifacts_to_build(ARTIFACTS_FOLDER_NICKEL + "/" + w['conference']))
+        #lambda w: expand(f"{PREFIX}/aggregated/{{{{conference}}}}/{{{{type}}}}/{{artifact}}.csv", artifact=get_artifacts_to_build(ARTIFACTS_FOLDER_NICKEL + "/" + w['conference']))
+    output:
+        f"{PREFIX}/aggregated/{{conference}}/{{type}}.csv"
+    shell:
+        "cat {input} > {output}"
+
+
+
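A note on the quadruple braces in these patterns: the f-string halves `{{{{conference}}}}` to `{{conference}}`, and `expand` (which formats the pattern) halves it again to `{conference}`, leaving it as a Snakemake wildcard, while single-braced fields like `{date}` are filled in immediately. A plain-Python sketch of the same two-stage resolution, with a hypothetical prefix:

```python
PREFIX = "/results"  # hypothetical value of config["prefix"]
pattern = f"{PREFIX}/{{{{conference}}}}/{{{{type}}}}/{{{{artifact}}}}/{{date}}.csv"
# After f-string formatting:
#   /results/{{conference}}/{{type}}/{{artifact}}/{date}.csv
print(pattern.format(date="2024-01-01"))
# format() fills {date} and collapses {{...}} to {...}, as expand() does:
#   /results/{conference}/{type}/{artifact}/2024-01-01.csv
```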