Added initial support for Python venvs, closes #21. Disabled the Docker build cache by default and added an option to re-enable it, closes #23. Removed the conditions on the existence of git_packages and misc_packages, since the workflow already enforces their existence through the Nickel artifact contract.

antux18 2024-07-24 18:05:31 +02:00
parent aab563fbfe
commit d2768d5373
14 changed files with 70 additions and 23 deletions

View File

@ -11,5 +11,8 @@
{ name = "bazel-4.0.0-installer-linux-x86_64", url = "https://github.com/bazelbuild/bazel/releases/download/4.0.0/bazel-4.0.0-installer-linux-x86_64.sh" },
{ name = "rust", url = "https://sh.rustup.rs" },
{ name = "mariadb-connector-cpp-1.0.0-ubuntu-groovy-amd64", url = "https://dlm.mariadb.com/1601342/Connectors/cpp/connector-cpp-1.0.0/mariadb-connector-cpp-1.0.0-ubuntu-groovy-amd64.tar.gz" }
],
python_venvs = [
{ path = "/home/k9db/experiments/scripts/plotting/venv" }
]
}

View File

@ -27,4 +27,5 @@
url = "http://biodynamo-lfs.web.cern.ch/biodynamo-lfs/third-party/paraview_v5.9.0_ubuntu-20.04_default.tar.gz"
},
],
python_venvs = []
}

View File

@ -8,4 +8,5 @@
package_managers = [ "dpkg", "pip" ],
git_packages = [],
misc_packages = [],
python_venvs = []
}

View File

@ -12,4 +12,7 @@
misc_packages = [
{ name = "cmake-3.23.1", url = "https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1.tar.gz" }
],
python_venvs = [
{ path = "/root/MocCUDA/py3_venv" }
]
}

View File

@ -2,7 +2,7 @@
artifact_url = "https://zenodo.org/record/6926481/files/koparasy/HPAC-v0.0.0-Puppeteer.zip",
type = "zip",
doi = "10.5555/3571885.3571974",
comment = "The package 'cmake-3.14.0-Linux-x86_64' is not specified below, because it is installed using a Bash script included in the artifact archive itself.",
comment = "The package 'cmake-3.14.0-Linux-x86_64' is not specified below, because it is installed using a Bash script included in the artifact archive itself. The file puppet_env.sh is missing and may hold information about a possible Python venv.",
image_name = "puppeteer:artefact",
dockerfile_location = "koparasy-HPAC-2723bb8/approx/puppeteer/container",
package_managers = [ "dpkg", "pip" ],
@ -10,4 +10,5 @@
misc_packages = [
{ name = "HPAC", url = "https://github.com/koparasy/HPAC/archive/refs/heads/develop.zip" }
],
python_venvs = []
}

View File

@ -16,5 +16,6 @@
name = "Miniconda3-py310_23.1.0-1-Linux-x86_64",
url = "https://repo.anaconda.com/miniconda/Miniconda3-py310_23.1.0-1-Linux-x86_64.sh"
}
-]
+],
python_venvs = []
}

View File

@ -11,5 +11,8 @@
],
misc_packages = [
{ name = "mpkg1", url = "http://example.com/package.zip" }
],
python_venvs = [
{ path = "path/to/venv" }
]
}

View File

@ -12,4 +12,5 @@
misc_packages = [
{ name = "mpkg1", url = "http://localhost/package1.zip" }
],
python_venvs = []
}

View File

@ -10,4 +10,5 @@
misc_packages = [
{ name = "Miniconda3-py37_4.12.0-Linux-x86_64", url = "https://repo.anaconda.com/miniconda/Miniconda3-py37_4.12.0-Linux-x86_64.sh" }
],
python_venvs = []
}

View File

@ -21,4 +21,5 @@
url = "https://www.python.org/ftp/python/3.10.2/Python-3.10.2.tgz"
}
],
python_venvs = []
}

View File

@ -14,4 +14,5 @@
misc_packages = [
{ name = "pip", url = "https://bootstrap.pypa.io/get-pip.py" }
],
python_venvs = []
}

36 ecg.py
View File

@ -165,7 +165,7 @@ def buildstatus_saver(output, buildstatus_path, config_path):
buildstatus_file.write(f"{artifact_name},{timestamp},unknown_error\n")
buildstatus_file.close()
-def build_image(config, src_dir):
+def build_image(config, src_dir, docker_cache = False):
"""
Builds the given Docker image in 'config'.
@ -177,15 +177,21 @@ def build_image(config, src_dir):
src_dir: str
Path to the directory where the artifact is stored.
docker_cache: bool
Enables or disables Docker 'build' cache.
Returns
-------
return_code: bool, build_output: str
Return code and output of Docker 'build'.
"""
cache_arg = "--no-cache"
if docker_cache:
cache_arg = ""
name = config["image_name"]
logging.info(f"Starting building image {name}")
path = os.path.join(src_dir, config["dockerfile_location"])
build_command = f"docker build -t {config['image_name']} ."
build_command = f"docker build {cache_arg} -t {config['image_name']} ."
build_process = subprocess.run(build_command.split(" "), cwd=path, capture_output=True)
build_output = f"stdout:\n{build_process.stdout.decode('utf-8')}\nstderr:\n{build_process.stderr.decode('utf-8')}"
# build_output = build_process.stderr.decode("utf-8")
@ -242,6 +248,7 @@ def check_env(config, src_dir, pkglist_path):
pkglist_file = open(pkglist_path, "w")
# pkglist_file.write("package,version,package_manager\n")
path = os.path.join(src_dir, config["dockerfile_location"])
# Package managers:
for pkgmgr in config["package_managers"]:
# "--entrypoint" requires command and arguments to be separated.
# This Docker 'run' option is used to prevent the shell from printing
@ -255,13 +262,15 @@ def check_env(config, src_dir, pkglist_path):
format_process = subprocess.run(f"cat << EOF | {listformat_cmd}\n{pkglist_process.stdout.decode('utf-8')}EOF", cwd=path, capture_output=True, shell=True)
pkglist = format_process.stdout.decode("utf-8")
pkglist_file.write(pkglist)
if "git_packages" in config.keys():
# Git packages:
logging.info("Checking Git packages")
for repo in config["git_packages"]:
pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", repo["location"], "--entrypoint", gitcmd[0], config["image_name"]] + gitcmd[1].split(" "), cwd=path, capture_output=True)
repo_row = f"{repo['name']},{pkglist_process.stdout.decode('utf-8')},git"
pkglist_file.write(f"{repo_row}\n")
if "misc_packages" in config.keys():
# Misc packages:
logging.info("Checking packages obtained outside of a package manager or VCS")
for pkg in config["misc_packages"]:
logging.info(f"Downloading package {pkg['name']} from {pkg['url']}")
@ -270,6 +279,16 @@ def check_env(config, src_dir, pkglist_path):
pkg_hash = download_file(pkg["url"], pkg_path)
pkg_row = f"{pkg['name']},{pkg_hash},misc"
pkglist_file.write(f"{pkg_row}\n")
# Python venvs:
for venv in config["python_venvs"]:
pipcmd = pkgmgr_cmd["pip"][0]
pipcmd_args = pkgmgr_cmd["pip"][1]
pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", venv["path"], "--entrypoint", "source", config["image_name"], ".bin/activate", "&&", pipcmd] + pipcmd_args.split(" "), cwd=path, capture_output=True)
format_process = subprocess.run(f"cat << EOF | {listformat_cmd}\n{pkglist_process.stdout.decode('utf-8')}EOF", cwd=path, capture_output=True, shell=True)
pkglist = format_process.stdout.decode("utf-8")
pkglist_file.write(pkglist)
pkglist_file.close()
def remove_image(config):
@ -337,6 +356,10 @@ def main():
help = "Path to the cache directory, where artifacts that are downloaded will be stored for future usage. " \
"If not specified, cache is disabled.",
required = False
),
parser.add_argument('--docker-cache',
action = 'store_true',
help = "Use cache for Docker 'build'."
)
args = parser.parse_args()
@ -352,8 +375,7 @@ def main():
# file:
print(f"Output will be stored in {log_path}")
logging.basicConfig(filename = log_path, filemode = "w", format = '%(levelname)s: %(message)s', level = logging.INFO)
-verbose = args.verbose
-if verbose:
+if args.verbose:
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
# Parsing the input file including the configuration of the artifact's
@ -374,7 +396,7 @@ def main():
use_cache = True
dl_dir = cache_dir
artifact_dir = download_sources(config, arthashlog_path, dl_dir, use_cache)
return_code, build_output = build_image(config, artifact_dir)
return_code, build_output = build_image(config, artifact_dir, args.docker_cache)
if return_code == 0:
check_env(config, artifact_dir, pkglist_path)
remove_image(config)

2 run.sh
View File

@ -13,4 +13,4 @@ then
mkdir $CACHE_DIR
fi
-./ecg.py $TESTFILE -v -p $OUTPUT_PATH/pkglist.csv -l $OUTPUT_PATH/log.txt -b $OUTPUT_PATH/build_status.csv -a $OUTPUT_PATH/artifact_hash.csv -c $CACHE_DIR
+./ecg.py $TESTFILE -v -p $OUTPUT_PATH/pkglist.csv -l $OUTPUT_PATH/log.txt -b $OUTPUT_PATH/build_status.csv -a $OUTPUT_PATH/artifact_hash.csv -c $CACHE_DIR --docker-cache

View File

@ -39,6 +39,11 @@ in
| doc "URL of the package. Will be used to compute the hash"
| URL
},
PythonVenv = {
path
| doc "Path to the Python venv."
| FilePath
},
Artifact = {
artifact_url
| doc "URL where to download the artifact"
@ -67,5 +72,8 @@ in
misc_packages
| doc "Misc. packages downloaded from the container"
| Array MiscPackage,
python_venvs
| doc "Python venvs created in the container"
| Array PythonVenv
}
}