Added initial support for Python venvs, close #21. Disabled the Docker build cache by default, and added an option to re-enable it, close #23. Removed conditions on the existence of git_packages and misc_packages, because the workflow enforces their existence with the Nickel artifact contract.

This commit is contained in:
antux18 2024-07-24 18:05:31 +02:00
parent aab563fbfe
commit d2768d5373
14 changed files with 70 additions and 23 deletions

View File

@ -11,5 +11,8 @@
{ name = "bazel-4.0.0-installer-linux-x86_64", url = "https://github.com/bazelbuild/bazel/releases/download/4.0.0/bazel-4.0.0-installer-linux-x86_64.sh" }, { name = "bazel-4.0.0-installer-linux-x86_64", url = "https://github.com/bazelbuild/bazel/releases/download/4.0.0/bazel-4.0.0-installer-linux-x86_64.sh" },
{ name = "rust", url = "https://sh.rustup.rs" }, { name = "rust", url = "https://sh.rustup.rs" },
{ name = "mariadb-connector-cpp-1.0.0-ubuntu-groovy-amd64", url = "https://dlm.mariadb.com/1601342/Connectors/cpp/connector-cpp-1.0.0/mariadb-connector-cpp-1.0.0-ubuntu-groovy-amd64.tar.gz" } { name = "mariadb-connector-cpp-1.0.0-ubuntu-groovy-amd64", url = "https://dlm.mariadb.com/1601342/Connectors/cpp/connector-cpp-1.0.0/mariadb-connector-cpp-1.0.0-ubuntu-groovy-amd64.tar.gz" }
],
python_venvs = [
{ path = "/home/k9db/experiments/scripts/plotting/venv" }
] ]
} }

View File

@ -27,4 +27,5 @@
url = "http://biodynamo-lfs.web.cern.ch/biodynamo-lfs/third-party/paraview_v5.9.0_ubuntu-20.04_default.tar.gz" url = "http://biodynamo-lfs.web.cern.ch/biodynamo-lfs/third-party/paraview_v5.9.0_ubuntu-20.04_default.tar.gz"
}, },
], ],
python_venvs = []
} }

View File

@ -8,4 +8,5 @@
package_managers = [ "dpkg", "pip" ], package_managers = [ "dpkg", "pip" ],
git_packages = [], git_packages = [],
misc_packages = [], misc_packages = [],
python_venvs = []
} }

View File

@ -12,4 +12,7 @@
misc_packages = [ misc_packages = [
{ name = "cmake-3.23.1", url = "https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1.tar.gz" } { name = "cmake-3.23.1", url = "https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1.tar.gz" }
], ],
python_venvs = [
{ path = "/root/MocCUDA/py3_venv" }
]
} }

View File

@ -2,7 +2,7 @@
artifact_url = "https://zenodo.org/record/6926481/files/koparasy/HPAC-v0.0.0-Puppeteer.zip", artifact_url = "https://zenodo.org/record/6926481/files/koparasy/HPAC-v0.0.0-Puppeteer.zip",
type = "zip", type = "zip",
doi = "10.5555/3571885.3571974", doi = "10.5555/3571885.3571974",
comment = "The package 'cmake-3.14.0-Linux-x86_64' is not specified below, because it is installed using a Bash script included in the artifact archive itself.", comment = "The package 'cmake-3.14.0-Linux-x86_64' is not specified below, because it is installed using a Bash script included in the artifact archive itself. The file puppet_env.sh is missing and may hold information about a possible Python venv.",
image_name = "puppeteer:artefact", image_name = "puppeteer:artefact",
dockerfile_location = "koparasy-HPAC-2723bb8/approx/puppeteer/container", dockerfile_location = "koparasy-HPAC-2723bb8/approx/puppeteer/container",
package_managers = [ "dpkg", "pip" ], package_managers = [ "dpkg", "pip" ],
@ -10,4 +10,5 @@
misc_packages = [ misc_packages = [
{ name = "HPAC", url = "https://github.com/koparasy/HPAC/archive/refs/heads/develop.zip" } { name = "HPAC", url = "https://github.com/koparasy/HPAC/archive/refs/heads/develop.zip" }
], ],
python_venvs = []
} }

View File

@ -16,5 +16,6 @@
name = "Miniconda3-py310_23.1.0-1-Linux-x86_64", name = "Miniconda3-py310_23.1.0-1-Linux-x86_64",
url = "https://repo.anaconda.com/miniconda/Miniconda3-py310_23.1.0-1-Linux-x86_64.sh" url = "https://repo.anaconda.com/miniconda/Miniconda3-py310_23.1.0-1-Linux-x86_64.sh"
} }
] ],
python_venvs = []
} }

View File

@ -11,5 +11,8 @@
], ],
misc_packages = [ misc_packages = [
{ name = "mpkg1", url = "http://example.com/package.zip" } { name = "mpkg1", url = "http://example.com/package.zip" }
],
python_venvs = [
{ path = "path/to/venv" }
] ]
} }

View File

@ -12,4 +12,5 @@
misc_packages = [ misc_packages = [
{ name = "mpkg1", url = "http://localhost/package1.zip" } { name = "mpkg1", url = "http://localhost/package1.zip" }
], ],
python_venvs = []
} }

View File

@ -10,4 +10,5 @@
misc_packages = [ misc_packages = [
{ name = "Miniconda3-py37_4.12.0-Linux-x86_64", url = "https://repo.anaconda.com/miniconda/Miniconda3-py37_4.12.0-Linux-x86_64.sh" } { name = "Miniconda3-py37_4.12.0-Linux-x86_64", url = "https://repo.anaconda.com/miniconda/Miniconda3-py37_4.12.0-Linux-x86_64.sh" }
], ],
python_venvs = []
} }

View File

@ -21,4 +21,5 @@
url = "https://www.python.org/ftp/python/3.10.2/Python-3.10.2.tgz" url = "https://www.python.org/ftp/python/3.10.2/Python-3.10.2.tgz"
} }
], ],
python_venvs = []
} }

View File

@ -14,4 +14,5 @@
misc_packages = [ misc_packages = [
{ name = "pip", url = "https://bootstrap.pypa.io/get-pip.py" } { name = "pip", url = "https://bootstrap.pypa.io/get-pip.py" }
], ],
python_venvs = []
} }

62
ecg.py
View File

@ -165,7 +165,7 @@ def buildstatus_saver(output, buildstatus_path, config_path):
buildstatus_file.write(f"{artifact_name},{timestamp},unknown_error\n") buildstatus_file.write(f"{artifact_name},{timestamp},unknown_error\n")
buildstatus_file.close() buildstatus_file.close()
def build_image(config, src_dir): def build_image(config, src_dir, docker_cache = False):
""" """
Builds the given Docker image in 'config'. Builds the given Docker image in 'config'.
@ -177,15 +177,21 @@ def build_image(config, src_dir):
src_dir: str src_dir: str
Path to the directory where the artifact is stored. Path to the directory where the artifact is stored.
docker_cache: bool
Enables or disables Docker 'build' cache.
Returns Returns
------- -------
return_code: bool, build_output: str return_code: bool, build_output: str
Return code and output of Docker 'build'. Return code and output of Docker 'build'.
""" """
cache_arg = "--no-cache"
if docker_cache:
cache_arg = ""
name = config["image_name"] name = config["image_name"]
logging.info(f"Starting building image {name}") logging.info(f"Starting building image {name}")
path = os.path.join(src_dir, config["dockerfile_location"]) path = os.path.join(src_dir, config["dockerfile_location"])
build_command = f"docker build -t {config['image_name']} ." build_command = f"docker build {cache_arg} -t {config['image_name']} ."
build_process = subprocess.run(build_command.split(" "), cwd=path, capture_output=True) build_process = subprocess.run(build_command.split(" "), cwd=path, capture_output=True)
build_output = f"stdout:\n{build_process.stdout.decode('utf-8')}\nstderr:\n{build_process.stderr.decode('utf-8')}" build_output = f"stdout:\n{build_process.stdout.decode('utf-8')}\nstderr:\n{build_process.stderr.decode('utf-8')}"
# build_output = build_process.stderr.decode("utf-8") # build_output = build_process.stderr.decode("utf-8")
@ -242,6 +248,7 @@ def check_env(config, src_dir, pkglist_path):
pkglist_file = open(pkglist_path, "w") pkglist_file = open(pkglist_path, "w")
# pkglist_file.write("package,version,package_manager\n") # pkglist_file.write("package,version,package_manager\n")
path = os.path.join(src_dir, config["dockerfile_location"]) path = os.path.join(src_dir, config["dockerfile_location"])
# Package managers:
for pkgmgr in config["package_managers"]: for pkgmgr in config["package_managers"]:
# "--entrypoint" requires command and arguments to be separated. # "--entrypoint" requires command and arguments to be separated.
# This Docker 'run' option is used to prevent the shell from printing # This Docker 'run' option is used to prevent the shell from printing
@ -255,21 +262,33 @@ def check_env(config, src_dir, pkglist_path):
format_process = subprocess.run(f"cat << EOF | {listformat_cmd}\n{pkglist_process.stdout.decode('utf-8')}EOF", cwd=path, capture_output=True, shell=True) format_process = subprocess.run(f"cat << EOF | {listformat_cmd}\n{pkglist_process.stdout.decode('utf-8')}EOF", cwd=path, capture_output=True, shell=True)
pkglist = format_process.stdout.decode("utf-8") pkglist = format_process.stdout.decode("utf-8")
pkglist_file.write(pkglist) pkglist_file.write(pkglist)
if "git_packages" in config.keys():
logging.info("Checking Git packages") # Git packages:
for repo in config["git_packages"]: logging.info("Checking Git packages")
pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", repo["location"], "--entrypoint", gitcmd[0], config["image_name"]] + gitcmd[1].split(" "), cwd=path, capture_output=True) for repo in config["git_packages"]:
repo_row = f"{repo['name']},{pkglist_process.stdout.decode('utf-8')},git" pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", repo["location"], "--entrypoint", gitcmd[0], config["image_name"]] + gitcmd[1].split(" "), cwd=path, capture_output=True)
pkglist_file.write(f"{repo_row}\n") repo_row = f"{repo['name']},{pkglist_process.stdout.decode('utf-8')},git"
if "misc_packages" in config.keys(): pkglist_file.write(f"{repo_row}\n")
logging.info("Checking packages obtained outside of a package manager or VCS")
for pkg in config["misc_packages"]: # Misc packages:
logging.info(f"Downloading package {pkg['name']} from {pkg['url']}") logging.info("Checking packages obtained outside of a package manager or VCS")
pkg_file = tempfile.NamedTemporaryFile() for pkg in config["misc_packages"]:
pkg_path = pkg_file.name logging.info(f"Downloading package {pkg['name']} from {pkg['url']}")
pkg_hash = download_file(pkg["url"], pkg_path) pkg_file = tempfile.NamedTemporaryFile()
pkg_row = f"{pkg['name']},{pkg_hash},misc" pkg_path = pkg_file.name
pkglist_file.write(f"{pkg_row}\n") pkg_hash = download_file(pkg["url"], pkg_path)
pkg_row = f"{pkg['name']},{pkg_hash},misc"
pkglist_file.write(f"{pkg_row}\n")
# Python venvs:
for venv in config["python_venvs"]:
pipcmd = pkgmgr_cmd["pip"][0]
pipcmd_args = pkgmgr_cmd["pip"][1]
pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", venv["path"], "--entrypoint", "source", config["image_name"], ".bin/activate", "&&", pipcmd] + pipcmd_args.split(" "), cwd=path, capture_output=True)
format_process = subprocess.run(f"cat << EOF | {listformat_cmd}\n{pkglist_process.stdout.decode('utf-8')}EOF", cwd=path, capture_output=True, shell=True)
pkglist = format_process.stdout.decode("utf-8")
pkglist_file.write(pkglist)
pkglist_file.close() pkglist_file.close()
def remove_image(config): def remove_image(config):
@ -337,6 +356,10 @@ def main():
help = "Path to the cache directory, where artifacts that are downloaded will be stored for future usage. " \ help = "Path to the cache directory, where artifacts that are downloaded will be stored for future usage. " \
"If not specified, cache is disabled.", "If not specified, cache is disabled.",
required = False required = False
),
parser.add_argument('--docker-cache',
action = 'store_true',
help = "Use cache for Docker 'build'."
) )
args = parser.parse_args() args = parser.parse_args()
@ -352,8 +375,7 @@ def main():
# file: # file:
print(f"Output will be stored in {log_path}") print(f"Output will be stored in {log_path}")
logging.basicConfig(filename = log_path, filemode = "w", format = '%(levelname)s: %(message)s', level = logging.INFO) logging.basicConfig(filename = log_path, filemode = "w", format = '%(levelname)s: %(message)s', level = logging.INFO)
verbose = args.verbose if args.verbose:
if verbose:
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
# Parsing the input file including the configuration of the artifact's # Parsing the input file including the configuration of the artifact's
@ -374,7 +396,7 @@ def main():
use_cache = True use_cache = True
dl_dir = cache_dir dl_dir = cache_dir
artifact_dir = download_sources(config, arthashlog_path, dl_dir, use_cache) artifact_dir = download_sources(config, arthashlog_path, dl_dir, use_cache)
return_code, build_output = build_image(config, artifact_dir) return_code, build_output = build_image(config, artifact_dir, args.docker_cache)
if return_code == 0: if return_code == 0:
check_env(config, artifact_dir, pkglist_path) check_env(config, artifact_dir, pkglist_path)
remove_image(config) remove_image(config)

2
run.sh
View File

@ -13,4 +13,4 @@ then
mkdir $CACHE_DIR mkdir $CACHE_DIR
fi fi
./ecg.py $TESTFILE -v -p $OUTPUT_PATH/pkglist.csv -l $OUTPUT_PATH/log.txt -b $OUTPUT_PATH/build_status.csv -a $OUTPUT_PATH/artifact_hash.csv -c $CACHE_DIR ./ecg.py $TESTFILE -v -p $OUTPUT_PATH/pkglist.csv -l $OUTPUT_PATH/log.txt -b $OUTPUT_PATH/build_status.csv -a $OUTPUT_PATH/artifact_hash.csv -c $CACHE_DIR --docker-cache

View File

@ -39,6 +39,11 @@ in
| doc "URL of the package. Will be used to compute the hash" | doc "URL of the package. Will be used to compute the hash"
| URL | URL
}, },
PythonVenv = {
path
| doc "Path to the Python venv."
| FilePath
}
Artifact = { Artifact = {
artifact_url artifact_url
| doc "URL where to download the artifact" | doc "URL where to download the artifact"
@ -67,5 +72,8 @@ in
misc_packages misc_packages
| doc "Misc. packages downloaded from the container" | doc "Misc. packages downloaded from the container"
| Array MiscPackage, | Array MiscPackage,
python_venvs
| doc "Python venvs created in the container"
| Array PythonVenv
} }
} }