Switched from an empty string to the sentinel value "-1" to indicate artifact download failure. Fixed a bug where the cache folder was not removed when the artifact download failed.

This commit is contained in:
antux18 2024-08-05 16:29:07 +02:00
parent 199d1dc282
commit ea6d78dac2
2 changed files with 5 additions and 4 deletions
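For context, the failure convention introduced here can be sketched as follows. This is a simplified illustration rather than the actual `ecg.py` code: the `fetch_artifact` helper and the use of `hashlib.sha256` are assumptions, while the `"-1"` sentinel and the `os.rmdir` cleanup mirror the hunks below.

```python
import hashlib
import os

import requests


def download_file(url, dest):
    """Download url into dest; return the file hash, or "-1" if the download failed."""
    file_hash = "-1"  # sentinel meaning "download failed" (an empty string before this commit)
    try:
        req = requests.get(url)
        if req.status_code != 404:
            with open(dest, "wb") as f:
                f.write(req.content)
            # SHA-256 is an assumption here; ecg.py may compute the hash differently.
            file_hash = hashlib.sha256(req.content).hexdigest()
    except requests.exceptions.RequestException:
        pass  # network errors keep the "-1" sentinel
    return file_hash


def fetch_artifact(url, cache_dir):
    """Hypothetical caller showing the cleanup added by this commit."""
    artifact_dir = os.path.join(cache_dir, "artifact")
    os.makedirs(artifact_dir, exist_ok=True)
    artifact_path = os.path.join(cache_dir, "artifact.download")
    artifact_hash = download_file(url, artifact_path)
    if artifact_hash != "-1":
        return artifact_dir, artifact_hash
    # Download failed: remove the still-empty cache folder instead of leaving it behind.
    os.rmdir(artifact_dir)
    return "", artifact_hash
```

Returning a distinct sentinel instead of an empty string presumably makes the failure case explicit in the artifact hash log, where an empty field could otherwise be mistaken for a missing value.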


@@ -72,6 +72,7 @@ The following are the possible results of the build:
 - `artifact_unavailable`: The artifact could not be downloaded.
 - `dockerfile_not_found`: No Dockerfile has been found in the location specified in the configuration file.
 - `script_crash`: An error has occurred with the script itself.
+- `job_time_exceeded`: When running on a batch system such as OAR, this error indicates that the script exceeded the allocated run time and had to be terminated.
 - `unknown_error`: Any other error.
 
 ### Artifact hash log
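Since the list above enumerates the machine-readable result codes, a small post-processing sketch may help illustrate how they can be consumed. This is only a sketch: the layout of the build status file (one `config_path,status` row per build, with the status in the last column) is an assumption, not something documented in this excerpt.

```python
import csv
import sys
from collections import Counter

# Error codes listed in the documentation above (a successful build presumably
# uses a separate code that is not shown in this excerpt).
KNOWN_ERRORS = {
    "artifact_unavailable",
    "dockerfile_not_found",
    "script_crash",
    "job_time_exceeded",
    "unknown_error",
}


def tally_build_results(buildstatus_path):
    """Count how often each result code appears in a build status file."""
    counts = Counter()
    with open(buildstatus_path) as f:
        for row in csv.reader(f):
            if row:
                counts[row[-1].strip()] += 1  # assume the status is the last column
    return counts


if __name__ == "__main__":
    for status, count in tally_build_results(sys.argv[1]).most_common():
        marker = "" if status in KNOWN_ERRORS else " (not in the error list above)"
        print(f"{status}: {count}{marker}")
```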

ecg.py

@@ -65,7 +65,7 @@ def download_file(url, dest):
     str
         Hash of the downloaded file, or empty string if download failed.
     """
-    file_hash = ""
+    file_hash = "-1"
     try:
         req = requests.get(url)
         if req.status_code != 404:
@@ -118,7 +118,7 @@ def download_sources(config, arthashlog_path, dl_dir, use_cache):
         artifact_path = artifact_file.name
         artifact_hash = download_file(url, artifact_path)
         # If download was successful:
-        if artifact_hash != "":
+        if artifact_hash != "-1":
             if config["type"] == "zip":
                 artifact = zipfile.ZipFile(artifact_path)
             elif config["type"] == "tar":
@@ -127,6 +127,7 @@ def download_sources(config, arthashlog_path, dl_dir, use_cache):
             artifact.extractall(artifact_dir)
         # If download failed:
         else:
+            os.rmdir(artifact_dir)
             artifact_dir = ""
         # Logging the current hash of the artifact:
         arthashlog_file = open(arthashlog_path, "a")
@@ -316,7 +317,6 @@ def check_env(config, src_dir, image_name, pkglist_path):
         logging.info("Checking Git packages")
         for repo in config["git_packages"]:
             pkglist_process = subprocess.run(["docker", "run", "--rm", "-w", repo["location"], "--entrypoint", gitcmd[0], image_name] + gitcmd[1].split(" "), cwd=path, capture_output=True)
-            print(pkglist_process.stderr.decode('utf-8'))
             repo_row = f"{repo['name']},{pkglist_process.stdout.decode('utf-8')},git"
             pkglist_file.write(f"{repo_row}\n")
@@ -476,7 +476,7 @@ def main():
         log_file = open(log_path, "a")
         log_file.write(formatted_err)
         log_file.close()
-        print(formatted_err)
+        logging.error(formatted_err)
         status = "script_crash"
         buildresult_saver(status, buildstatus_path, config_path)
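The last hunk replaces a bare `print` with `logging.error` in the top-level crash handler. A minimal sketch of that pattern, assuming a `try/except` around the build steps and a stand-in `buildresult_saver` (the real one is defined elsewhere in `ecg.py` and its file format is not shown here):

```python
import logging
import traceback


def buildresult_saver(status, buildstatus_path, config_path):
    # Hypothetical stand-in: the real implementation and file format live in ecg.py.
    with open(buildstatus_path, "a") as f:
        f.write(f"{config_path},{status}\n")


def run_build(build, log_path, buildstatus_path, config_path):
    """Run build(), recording any unexpected crash as a `script_crash` result."""
    try:
        return build()
    except Exception:
        formatted_err = traceback.format_exc()
        # Keep the full traceback in the per-build log file...
        with open(log_path, "a") as log_file:
            log_file.write(formatted_err)
        # ...and report it through the logging framework instead of a bare print().
        logging.error(formatted_err)
        buildresult_saver("script_crash", buildstatus_path, config_path)
```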