#!/bin/python3

"""
ECG is a program that automates software environment checking
for scientific artifacts.

It is meant to be executed periodically to analyze variations in the
software environment of the artifact through time.
"""

import subprocess
import json
import yaml
import argparse
import tempfile
import os
import requests
import zipfile
import io
import tarfile
import pathlib
import logging
import datetime
import sys


# Paths:
pkglist_path = "pkglist.csv"  # Package list being generated
log_path = "log.txt"  # Output of the program
buildstatus_path = "build_status.csv"  # Summary of the build process of the image

# Commands to list installed packages along with their versions and the name
# of the package manager, for each supported package manager.
# Each package manager is associated with a tuple: the first item is the
# query command, and the second is the command that formats the output of
# the query command (the second can be an empty string when the formatting
# is already done through the options of the first command).
# The first command is run in the container, and the second on the host,
# to support container images that do not have the formatting tools
# installed.
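#
# For example (illustrative values only), a 'dpkg -l' line for a package
# "libfoo" in version "1.2-3" is turned by the awk command below into the CSV
# row "libfoo,1.2-3,dpkg", matching the "Package,Version,Package manager"
# header written by check_env().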
pkgmgr_cmd = {
    "dpkg": ("dpkg -l", "awk 'NR>5 {print $2 \",\" $3 \",\" \"dpkg\"}'"),
    "rpm": ("rpm -qa --queryformat '%{NAME},%{VERSION},rpm\\n'", ""),
    "pacman": ("pacman -Q", "awk '{print $1 \",\" $2 \",pacman\"}'"),
    "pip": ("pip freeze", "sed 's/==/,/g' | awk '{print $0 \",pip\"}'"),
    "conda": ("/root/.conda/bin/conda list -e", "sed 's/=/ /g' | awk 'NR>3 {print $1 \",\" $2 \",conda\"}'")
}

# Command to obtain the latest commit hash in a git repository:
gitcmd = "git log -n 1 --pretty=format:%H"
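# (The %H placeholder expands to the full commit hash of HEAD, without a
# trailing newline; check_env() records it in the "Version" column of the
# package list.)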


def download_sources(config):
    """
    Downloads the source of the artifact in 'config'.

    Parameters
    ----------
    config: dict
        Parsed YAML config file.

    Returns
    -------
    temp_dir: tempfile.TemporaryDirectory
        The directory where the artifact is downloaded to.
    """
    url = config["artifact_url"]
    logging.info(f"Downloading artifact from {url}")
    temp_dir = tempfile.TemporaryDirectory()
    req = requests.get(url)
    if config["type"] == "zip":
        artifact = zipfile.ZipFile(io.BytesIO(req.content))
    elif config["type"] == "tgz":
        artifact = tarfile.open(fileobj=io.BytesIO(req.content))
    logging.info(f"Extracting artifact at {temp_dir.name}")
    artifact.extractall(temp_dir.name)
    return temp_dir


def build_image(config, src_dir):
    """
    Builds the Docker image described in 'config'.

    Parameters
    ----------
    config: dict
        Parsed YAML config file.

    src_dir: str
        Path to the directory where the artifact is stored.

    Returns
    -------
    bool
        True if the 'docker build' command exited with code 0 (successful
        build), False otherwise.
    """
    name = config["image_name"]
    logging.info(f"Starting to build image {name}")
    path = os.path.join(src_dir, config["dockerfile_location"])
    build_command = "docker build -t " + config["image_name"] + " ."
    build_process = subprocess.run(build_command.split(" "), cwd=path, capture_output=True)
    logging.info(f"Output of '{build_command}':")
    logging.info(build_process.stdout)
    return_code = build_process.returncode
    logging.info(f"Command '{build_command}' exited with code {return_code}")
    return return_code == 0


def check_env(config, src_dir):
    """
    Builds a list of all software packages installed in the
    Docker image given in 'config', depending on the package managers
    specified in the configuration, then stores it in a CSV file.

    Parameters
    ----------
    config: dict
        Parsed YAML config file.

    src_dir: str
        Path to the directory where the artifact is stored.

    Returns
    -------
    None
    """
    pkglist_file = open(pkglist_path, "w")
    pkglist_file.write("Package,Version,Package manager\n")
    path = os.path.join(src_dir, config["dockerfile_location"])
    for pkgmgr in config["package_managers"]:
        logging.info(f"Checking '{pkgmgr}'")
        pkglist_process = subprocess.run(["docker", "run", "--rm", config["image_name"]] + pkgmgr_cmd[pkgmgr][0].split(" "), cwd=path, capture_output=True)
        # An empty formatting command would leave a dangling pipe in the shell
        # command below, so fall back to 'cat' as a pass-through in that case:
        format_cmd = pkgmgr_cmd[pkgmgr][1] if pkgmgr_cmd[pkgmgr][1] != "" else "cat"
        format_process = subprocess.run("cat << EOF | " + format_cmd + "\n" + pkglist_process.stdout.decode("utf-8") + "EOF", cwd=path, capture_output=True, shell=True)
        pkglist = format_process.stdout.decode("utf-8")
        pkglist_file.write(pkglist)
    if "git_packages" in config.keys():
        logging.info("Checking Git packages")
        for repo in config["git_packages"]:
            pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", repo["location"], config["image_name"]] + gitcmd.split(" "), cwd=path, capture_output=True)
            repo_row = repo["name"] + "," + pkglist_process.stdout.decode("utf-8") + ",git"
            pkglist_file.write(repo_row + "\n")
    if "misc_packages" in config.keys():
        logging.info("Checking packages obtained outside of a package manager or VCS")
        for pkg in config["misc_packages"]:
            logging.info(f"Downloading package {pkg['name']} from {pkg['url']}")
            req = requests.get(pkg["url"])
            pkg_file = tempfile.NamedTemporaryFile()
            pkg_file.write(req.content)
            pkg_file.flush()  # Make sure the content is on disk before hashing it
            pkglist_process = subprocess.run("sha256sum " + pkg_file.name + " | cut -zd ' ' -f 1", cwd=path, capture_output=True, shell=True)
            pkg_row = pkg["name"] + "," + pkglist_process.stdout.decode("utf-8") + ",misc"
            pkglist_file.write(pkg_row + "\n")
    pkglist_file.close()


def remove_image(config):
    """
    Removes the Docker image given in 'config'.

    Parameters
    ----------
    config: dict
        Parsed YAML config file.

    Returns
    -------
    None
    """
    name = config["image_name"]
    logging.info(f"Removing image '{name}'")
    subprocess.run(["docker", "rmi", name], capture_output=True)


def main():
    global pkglist_path, log_path, buildstatus_path

    # Command line arguments parsing:
    parser = argparse.ArgumentParser(
        prog = "ecg",
        description = "ECG is a program that automates software environment checking for scientific artifacts. "
                      "It is meant to be executed periodically to analyze variations in the software environment of the artifact through time."
    )
    parser.add_argument(
        "config",
        help = "The path to the configuration file of the artifact's Docker image."
    )
    parser.add_argument(
        "-p", "--pkg-list",
        help = "Path to the file where the package list generated by the program should be written."
    )
    parser.add_argument(
        "-l", "--log-path",
        help = "Path to the file where the output of the program should be logged."
    )
    parser.add_argument(
        "-b", "--build-summary",
        help = "Path to the file where the build summary of the Docker image given in the configuration file should be written."
    )
    parser.add_argument(
        "-c", "--cache-dir",
        help = "Path to the cache directory, where downloaded artifacts will be stored for future use."
    )
    parser.add_argument('-v', '--verbose',
        action = 'store_true',
        help = "Shows more details on what is being done.")
    args = parser.parse_args()
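
    # Example invocation (illustrative script and file names):
    #   python3 ecg.py artifact.yaml -p pkglist.csv -l log.txt -b build_status.csv -v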

    # Setting up the paths of the outputs:
    if args.pkg_list is not None:
        pkglist_path = args.pkg_list
    if args.log_path is not None:
        log_path = args.log_path
    if args.build_summary is not None:
        buildstatus_path = args.build_summary

    # Setting up the log: it will be written both to stdout and to the
    # specified file:
    logging.basicConfig(filename = log_path, filemode = "w", format = '%(levelname)s: %(message)s', level = logging.INFO)
    logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
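
    # With this format, log lines look like "INFO: <message>", both on stdout
    # and in the log file.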

    # Parsing the input YAML file including the configuration of
    # the artifact's image:
    config_file = open(args.config, "r")
    config = yaml.safe_load(config_file)
    config_file.close()

    verbose = args.verbose

    # if verbose:
    #     logging.info(f"Output will be stored in {output}")

    src_dir = download_sources(config)
    successful_build = build_image(config, src_dir.name)
    if successful_build:
        check_env(config, src_dir.name)
    remove_image(config)


if __name__ == "__main__":
    main()