Here is the documentation: https://geoplateforme.pages.gpf-tech.ign.fr/documentation

@@ -16,6 +16,12 @@ Unreleased
-->
## 2.0.0 - 2022-03-20
### Added
- Handle v2 input/output
## 1.0.0 - 2022-03-17
- Release 1
......
@@ -40,7 +40,7 @@ __uri_repository__ = (
__uri_tracker__ = f"{__uri_repository__}issues/"
__uri__ = __uri_repository__
__version__ = "1.0.0"
__version__ = "2.0.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
......
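The comprehension above is cut off by the fold; for reference, the usual semver-to-tuple idiom it follows looks like this self-contained sketch (an assumption — the file's actual tail is elided):

__version__ = "2.0.0"
__version_info__ = tuple(
    int(num) if num.isdigit() else num
    for num in __version__.replace("-", ".", 1).split(".")
)
# -> (2, 0, 0); any pre-release part stays a string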
@@ -18,12 +18,14 @@ from gpf_entrepot_toolbelt.orchestrator.models import (
)
from gpf_entrepot_toolbelt.orchestrator.status import Status
from gpf_entrepot_toolbelt.utils.dict_counter import count_dict_values
from gpf_entrepot_toolbelt.utils.exit_program import fail_and_exit
from gpf_entrepot_toolbelt.utils.gpf_logger import gpf_logger_script
# package
from gpf_check_md5.__about__ import (
__author__,
__cli_usage__,
__executable_name__,
__summary__,
__title__,
__title_clean__,
@@ -126,7 +128,6 @@ def main(argv: List[str] = None):
logger = gpf_logger_script(args.verbosity, __title_clean__)
# -- RUN LOGIC --
# check livraison
if not check_livraison_structure(
work_dir_path=args.work_dir_path,
@@ -137,28 +138,13 @@
"Upload (aka livraison) doesn't complies with the expected structure."
f"{args.work_dir_path}. Check the job logs for more details."
)
logger.error(error_message)
failed_output = OutputDataStructure(
executionId=getenv(
"CI_JOB_ID",
f"{__title_clean__}-{__version__}-{datetime.now():%Y-%m-%d_%H%M%s}",
),
status=Status.TECHNICAL_ERROR,
failures=["Bad upload (livraison) structure"],
trace=error_message,
fail_and_exit(
work_dir=args.work_dir_path,
error_message=error_message,
package_title=__title_clean__,
package_version=__version__,
package_exec_name=__executable_name__,
)
try:
out_file = Path(args.work_dir_path, "output.json")
out_file.parent.mkdir(parents=True, exist_ok=True)
with out_file.open("w", encoding="UTF8") as wf:
wf.write(failed_output.to_json())
except Exception as critical_error:
# the work dir is probably not writable; try to write somewhere
# else and to stdout
logger.critical(f"Unable to write the output file. Trace: {critical_error}")
sys.exit(error_message)
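For readers of this diff: the boilerplate blocks removed here (and below, for the configuration file) are what the new fail_and_exit helper from gpf_entrepot_toolbelt.utils.exit_program consolidates. A minimal sketch of what such a helper plausibly does, reconstructed from the removed lines; the real implementation may differ, for instance in how failures is filled or how package_exec_name is used:

def fail_and_exit(
    work_dir: Path,
    error_message: str,
    package_title: str,
    package_version: str,
    package_exec_name: str,
) -> None:
    # Sketch only: assumes the same imports as cli.py above
    # (OutputDataStructure, Status, getenv, datetime, Path, sys, logger).
    # package_exec_name is presumably used for log/output naming (not shown here).
    logger.error(error_message)
    failed_output = OutputDataStructure(
        executionId=getenv(
            "CI_JOB_ID",
            f"{package_title}-{package_version}-{datetime.now():%Y-%m-%d_%H%M%S}",
        ),
        status=Status.TECHNICAL_ERROR,
        failures=[error_message],  # assumption: callers previously passed a short label
        trace=error_message,
    )
    try:
        out_file = Path(work_dir, "output.json")
        out_file.parent.mkdir(parents=True, exist_ok=True)
        out_file.write_text(failed_output.to_json(), encoding="UTF8")
    except Exception as critical_error:
        # the work dir is probably not writable
        logger.critical(f"Unable to write the output file. Trace: {critical_error}")
    sys.exit(error_message)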
# open configuration file
try:
parameters = GpfOrchestratorParameters.from_json(
@@ -170,68 +156,33 @@
f"({args.work_dir_path}/{args.input_configuration_file_name} failed. "
f"Trace: {error}"
)
logger.error(error_message)
failed_output = OutputDataStructure(
executionId=getenv(
"CI_JOB_ID",
f"{__title_clean__}-{__version__}-{datetime.now():%Y-%m-%d_%H%M%s}",
),
status=Status.TECHNICAL_ERROR,
failures=["Bad input configuration file."],
trace=error_message,
fail_and_exit(
work_dir=args.work_dir_path,
error_message=error_message,
package_title=__title_clean__,
package_version=__version__,
package_exec_name=__executable_name__,
)
out_file = Path(args.work_dir_path, "output.json")
with out_file.open("w", encoding="UTF8") as wf:
wf.write(failed_output.to_json())
sys.exit(error_message)
# create output object
result_output = OutputDataStructure(executionId=parameters.executionId)
# Run
try:
run_result = run(
work_dir=args.work_dir_path,
upload_dir_name=args.upload_dir_name,
chunk_size=args.chunk_size,
)
if run_result[0] != 0:
result_output.status = (
Status.TECHNICAL_ERROR if run_result[0] >= 3 else Status(run_result[0])
parameters.set_pipeline_status(__executable_name__, run_result[0])
if run_result[0] != Status.SUCCESS:
logger.error(
f"Running {__title__} (version {__version__}) failed. Trace: {run_result[1]}"
)
result_output.failures = run_result[1]
except Exception as error:
logger.error(
f"Running {__title__} (version {__version__}) failed. Trace: {error}"
)
result_output.failures = run_result[1]
result_output.status = Status.TECHNICAL_ERROR
result_output.trace = error
# write result into output file
out_file = Path(args.work_dir_path, "output.json")
with out_file.open("w", encoding="UTF8") as wf:
wf.write(result_output.to_json())
# in debug mode, print output to stdout
ct_success, ct_failure, ct_error = (
count_dict_values(run_result[1], Status.SUCCESS.name),
count_dict_values(run_result[1], Status.FAILURE.name),
count_dict_values(run_result[1], Status.TECHNICAL_ERROR.name),
)
parameters.set_pipeline_status(__executable_name__, Status.TECHNICAL_ERROR)
# TODO: make this kind of output a generic function
report_term = f"""Execution report:
{'-'*40}
Success: {ct_success:8}
Failure: {ct_failure:8}
Error: {ct_error:10}
{'-'*40}
Total: {ct_success+ct_failure+ct_error:10} tested files.
"""
logger.debug(report_term)
# create output file
parameters.create_output_file(args.work_dir_path)
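The TODO above asks for a generic report function. A possible shape for it, as a hypothetical sketch reusing count_dict_values and Status exactly as main() does (assumes the same imports as cli.py):

def format_execution_report(results: dict) -> str:
    # Tally per-file statuses the same way main() does above.
    ct_success = count_dict_values(results, Status.SUCCESS.name)
    ct_failure = count_dict_values(results, Status.FAILURE.name)
    ct_error = count_dict_values(results, Status.TECHNICAL_ERROR.name)
    sep = "-" * 40
    return (
        f"Execution report:\n{sep}\n"
        f"Success: {ct_success:8}\n"
        f"Failure: {ct_failure:8}\n"
        f"Error: {ct_error:10}\n"
        f"{sep}\n"
        f"Total: {ct_success + ct_failure + ct_error:10} tested files.\n"
    )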
# -- Stand alone execution
......
@@ -85,7 +85,6 @@ def check_md5_file(filename: Path, status: dict, chunksize: int = 8192) -> int:
0 indicates SUCCESS
1 indicates at least one MD5 checksum error
2 indicates at least one technical error
3 indicates that both error types occurred at least once
"""
result = 0
@@ -120,14 +119,14 @@ def check_md5_file(filename: Path, status: dict, chunksize: int = 8192) -> int:
logger.error(f"TECHNICAL ERROR: {e}")
exit(os.EX_IOERR)
return result
return result if result < 2 else 2
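For context, the per-chunk hashing that check_md5_file relies on (chunksize defaults to 8192 above) typically looks like this self-contained sketch; the package's actual helper may differ:

import hashlib
from pathlib import Path

def compute_md5(file_path: Path, chunksize: int = 8192) -> str:
    # Hash the file in fixed-size chunks so large uploads are
    # processed without loading them fully into memory.
    md5 = hashlib.md5()
    with file_path.open("rb") as file_handle:
        for chunk in iter(lambda: file_handle.read(chunksize), b""):
            md5.update(chunk)
    return md5.hexdigest()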
def run(
work_dir: Path,
upload_dir_name: str = "upload",
chunk_size: int = 8192,
) -> Tuple[int, dict]:
) -> Tuple[Status, dict]:
"""Main function running the logic.
Args:
@@ -145,10 +144,10 @@ def run(
exc: _description_
Returns:
int: 0 is SUCCESS \
- 1: at least one MD5 checksum error \
- 2: at least one technical error \
- 3: both an MD5 error and a technical error at least once.
Status: SUCCESS \
- FAILURE: at least one MD5 checksum error \
- TECHNICAL_ERROR: at least one technical error.
dict : check descriptions for each file
"""
# checks
if not check_path(
@@ -189,8 +188,8 @@ def run(
status=status,
chunksize=chunk_size,
)
return result, status
result = result if result < 2 else 2  # the result is capped at 2
return Status(result), status
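The switch from raw integers to Status works because Status(result) looks the numeric value up in the enum. Based on the return codes documented above, the enum plausibly looks like the following; it actually lives in gpf_entrepot_toolbelt.orchestrator.status, so the exact values here are an assumption:

from enum import Enum

class Status(Enum):
    SUCCESS = 0          # every checksum matched
    FAILURE = 1          # at least one MD5 mismatch
    TECHNICAL_ERROR = 2  # I/O or other technical problem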
# -- Stand alone execution
......
# extra index URL used to resolve the packages below
--extra-index-url https://gitlab.gpf-tech.ign.fr/api/v4/groups/55/-/packages/pypi/simple
gpf-entrepot-toolbelt==0.14.*
gpf-entrepot-toolbelt==0.17.*
{
    "_id": "1231544456-1546546-164565",
    "job_name": "",
    "pipeline_status": {
        "string": "string",
        "job_name1": "SUCCESS",
        "job_name2": "FAILURE"
    },
    "inputs": {
        "uploads": [{
            "_id": "",
            "type": "stringEnum(uploadType)",
            "name": "",
            "type_infos": {},
            "srs": "",
            "extent": {"x1": 0, "x2": 0, "y1": 0, "y2": 0},
            "size": 0,
            "storage": {
                "_id": "string",
                "name": "string",
                "type": "stringEnum(storageType)",
                "type_infos": {}
            }
        }],
        "stored_datas": [{
            "_id": "",
            "type": "stringEnum(storedDataType)",
            "name": "",
            "type_infos": {},
            "srs": "",
            "extent": {},
            "size": 0,
            "storage": {
                "_id": "string",
                "name": "string",
                "type": "stringEnum(storageType)",
                "type_infos": {}
            }
        }]
    },
    "parameters": [{
        "name": "",
        "value": ""
    }],
    "global_variables": {
        "postgresql": {
            "user": "",
            "pass": ""
        },
        "swift": {
            "identity_api_version": "",
            "auth_url": "",
            "project_domain_name": "",
            "region_name": "",
            "tenant_id": "",
            "tenant_name": "",
            "user_domain_name": "",
            "username": "",
            "password": ""
        }
    }
}
\ No newline at end of file
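This fixture is the v2 input configuration that cli.py reads through GpfOrchestratorParameters.from_json (see the hunks above). A minimal stdlib-only sketch of inspecting it; the file path is illustrative:

import json
from pathlib import Path

config = json.loads(Path("parameters_v2.json").read_text(encoding="UTF8"))
print(config["_id"])                     # execution id, e.g. "1231544456-1546546-164565"
print(config["pipeline_status"])         # per-job statuses, e.g. {"job_name1": "SUCCESS", ...}
print(len(config["inputs"]["uploads"]))  # uploads whose checksums get verified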
772ac1a55fab1122f3b369ee9cd31549 md5.txt
b5871a318190397c5878ff2bd9f326d3 oslandia.txt
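These two fixture lines follow the classic md5sum format: a 32-character hex digest, whitespace, then the file name. A self-contained sketch of parsing such a file (the function name is illustrative, not the package's API):

from pathlib import Path

def parse_md5_file(md5_file: Path) -> dict:
    # Map each listed file name to its expected MD5 digest.
    expected = {}
    for line in md5_file.read_text(encoding="UTF8").splitlines():
        digest, _, name = line.strip().partition(" ")
        if digest and name:
            expected[name.strip()] = digest
    return expected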
{
    "_id": "1231544456-1546546-164565",
    "job_name": "",
    "pipeline_status": {
        "job_name1": "SUCCESS",
        "job_name2": "FAILURE"
    },
    "inputs": {
        "uploads": [{
            "_id": "",
            "type": "stringEnum(uploadType)",
            "name": "",
            "type_infos": {},
            "srs": "",
            "extent": {"x1": 0, "x2": 0, "y1": 0, "y2": 0},
            "size": 0,
            "storage": {
                "_id": "string",
                "name": "string",
                "type": "stringEnum(storageType)",
                "type_infos": {}
            }
        }],
        "stored_datas": [{
            "_id": "",
            "type": "stringEnum(storedDataType)",
            "name": "",
            "type_infos": {},
            "srs": "",
            "extent": {},
            "size": 0,
            "storage": {
                "_id": "string",
                "name": "string",
                "type": "stringEnum(storageType)",
                "type_infos": {}
            }
        }]
    },
    "parameters": [{
        "name": "",
        "value": ""
    }],
    "global_variables": {
        "postgresql": {
            "user": "",
            "pass": ""
        },
        "swift": {
            "identity_api_version": "",
            "auth_url": "",
            "project_domain_name": "",
            "region_name": "",
            "tenant_id": "",
            "tenant_name": "",
            "user_domain_name": "",
            "username": "",
            "password": ""
        }
    }
}
\ No newline at end of file
@@ -9,7 +9,9 @@
# standard
import glob
import json
import shutil
import unittest
from os import environ
from pathlib import Path
@@ -34,7 +36,7 @@ def test_cli_run_simple(capsys):
"--uploaddir",
"upload",
"--input-configuration-filename",
"parameters_full_good_old.json",
"parameters_v2.json",
]
)
@@ -50,7 +52,7 @@ def test_cli_run_env_vars(capsys):
Path("./tests/fixtures/livraisons/good/default").resolve()
)
environ["GPF_UPLOAD_DIR"] = "upload"
environ["GPF_INPUT_CONFIGURATION_FILENAME"] = "parameters_full_good_old.json"
environ["GPF_INPUT_CONFIGURATION_FILENAME"] = "parameters_v2.json"
cli.main([])
@@ -60,6 +62,7 @@
assert err == ""
@pytest.mark.skip(reason="Ignore tests on the V1 model")
def test_consistent_output_level(capsys):
"""Test bugfix https://jira.worldline.com/browse/IGNGPF-702"""
@@ -82,7 +85,14 @@ def test_consistent_output_level(capsys):
cli.main([])
out, err = capsys.readouterr()
with open(Path(environ["GPF_WORK_DIR"]) / "output.json") as output_json:
for file in glob.glob(f'{environ["GPF_WORK_DIR"]}/output*.json'):
shutil.move(
file, f'{environ["GPF_WORK_DIR"]}/output.1231544456-1546546-164565.json'
)
with open(
Path(environ["GPF_WORK_DIR"]) / "output.1231544456-1546546-164565.json"
) as output_json:
data = json.load(output_json)
assert data == result
......
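The glob-and-move dance in the test above exists because output files are now named per execution (output.<_id>.json, written by parameters.create_output_file). A small sketch of locating such a file under that assumed naming convention:

import glob
from pathlib import Path

def find_output_file(work_dir: Path) -> Path:
    # create_output_file appears to write output.<execution_id>.json;
    # take the first match (assumption based on this test's glob pattern).
    matches = sorted(glob.glob(str(work_dir / "output*.json")))
    if not matches:
        raise FileNotFoundError(f"no output*.json found in {work_dir}")
    return Path(matches[0])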