Here is the documentation: https://geoplateforme.pages.gpf-tech.ign.fr/documentation

@@ -15,27 +15,40 @@ Unreleased
 ### Removed
 -->

-## 2.0.1 - 2022-03-30
+## 2.1.0 - 2023-04-03
 ### Added
-- fix uppercase in the MD5 hash
+- S3 parameters in CLI
 ### Changed
+- Files are downloadable from S3 bucket
 ### Removed

+## 2.0.1 - 2023-03-30
+- fix uppercase in the MD5 hash
+
-## 2.0.0 - 2022-03-20
+## 2.0.0 - 2023-03-20
 ### Added
 - Handle v2 input/output

-## 1.0.0 - 2022-03-17
+## 1.0.1 - 2023-03-30
+- bugfix: uppercase in the MD5 hash
+
+## 1.0.0 - 2023-03-17
 - Release 1

-## 0.6.2 - 2022-03-17
+## 0.6.2 - 2023-03-17
 - Fix tag release 1

-## 0.6.1 - 2022-03-17
+## 0.6.1 - 2023-03-17
 - Wrong version, do not use
......
@@ -40,7 +40,7 @@ __uri_repository__ = (
 __uri_tracker__ = f"{__uri_repository__}issues/"
 __uri__ = __uri_repository__
-__version__ = "2.0.1"
+__version__ = "2.1.0"
 __version_info__ = tuple(
     [
         int(num) if num.isdigit() else num
......
@@ -4,20 +4,14 @@
 # standard lib
 import argparse
 import sys
-from datetime import datetime
-from os import getenv
+from os import environ, getenv
 from pathlib import Path
 from typing import List

 # 3rd party
 from gpf_entrepot_toolbelt.orchestrator.check_livraison import check_livraison_structure
-from gpf_entrepot_toolbelt.orchestrator.models import (
-    GpfOrchestratorParameters,
-    OutputDataStructure,
-)
+from gpf_entrepot_toolbelt.orchestrator.models import GpfOrchestratorParameters
 from gpf_entrepot_toolbelt.orchestrator.status import Status
 from gpf_entrepot_toolbelt.utils.dict_counter import count_dict_values
 from gpf_entrepot_toolbelt.utils.exit_program import fail_and_exit
 from gpf_entrepot_toolbelt.utils.gpf_logger import gpf_logger_script
@@ -108,6 +102,43 @@ def main(argv: List[str] = None):
         default=getenv("GPF_CHUNK_SIZE", 8192),
     )

+    # S3 configuration
+    parser.add_argument(
+        "--s3-url",
+        metavar="GPF_S3_URL",
+        dest="s3_url",
+        type=str,
+        help="S3 storage URL",
+        default=getenv("GPF_S3_URL", "http://localhost:9000"),
+    )
+    parser.add_argument(
+        "--s3-key",
+        metavar="GPF_S3_KEY",
+        dest="s3_key",
+        type=str,
+        help="S3 storage user",
+        default=getenv("GPF_S3_KEY", "admin"),
+    )
+    parser.add_argument(
+        "--s3-secret-key",
+        metavar="GPF_S3_SECRETKEY",
+        dest="s3_secret_key",
+        type=str,
+        help="S3 storage password",
+        default=getenv("GPF_S3_SECRETKEY", "minioadmin"),
+    )
+    parser.add_argument(
+        "--s3-region",
+        metavar="GPF_S3_REGION",
+        dest="s3_region",
+        type=str,
+        help="S3 storage region",
+        default=getenv("GPF_S3_REGION", "eu-west-3"),
+    )

     # -- PARSE PASSED ARGUMENTS --
     # get passed args and force print help if none
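For reference, a minimal sketch of exercising the new S3 flags by calling the entry point directly. The import path is hypothetical (the real package layout is not shown in this diff) and the values simply mirror the argparse defaults above:

```python
# Hypothetical module path; the actual package name may differ.
from gpf_check_md5.cli import main

# Placeholder values matching the defaults declared above.
main([
    "--s3-url", "http://localhost:9000",
    "--s3-key", "admin",
    "--s3-secret-key", "minioadmin",
    "--s3-region", "eu-west-3",
])
```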
@@ -127,6 +158,14 @@ def main(argv: List[str] = None):
     logger = gpf_logger_script(args.verbosity, __title_clean__)

+    # Set the environment variables
+    # read by the toolbelt
+    if args.s3_url:
+        environ["GPF_S3_URL"] = args.s3_url
+        environ["GPF_S3_KEY"] = args.s3_key
+        environ["GPF_S3_SECRETKEY"] = args.s3_secret_key
+        environ["GPF_S3_REGION"] = args.s3_region

     # -- RUN LOGIC --
     # check livraison
     if not check_livraison_structure(
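A note on the pattern above: writing to os.environ at runtime makes the values visible to any later getenv() call in the same process, which is presumably how the toolbelt picks up the S3 configuration set by the CLI. A minimal illustration:

```python
from os import environ, getenv

# Written by the CLI layer...
environ["GPF_S3_URL"] = "http://localhost:9000"
# ...and read later by library code running in the same process.
assert getenv("GPF_S3_URL") == "http://localhost:9000"
```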
@@ -164,10 +203,26 @@ def main(argv: List[str] = None):
             package_exec_name=__executable_name__,
         )

+    # Download files into the upload folder
+    try:
+        upload_dir_path: Path = args.work_dir_path / args.upload_dir_name
+        logger.debug(f"Downloading files into folder {upload_dir_path}")
+        parameters.import_input_files(upload_dir_path)
+    except Exception as error:
+        error_message = f"Download files into upload folder failed. Trace: {error}"
+        fail_and_exit(
+            work_dir=args.work_dir_path,
+            error_message=error_message,
+            package_title=__title_clean__,
+            package_version=__version__,
+            package_exec_name=__executable_name__,
+        )

     # Run
     try:
         run_result = run(
             work_dir=args.work_dir_path,
             parameters=parameters,
+            upload_dir_name=args.upload_dir_name,
         )
         parameters.set_pipeline_status(__executable_name__, run_result[0])
......
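The parameters.import_input_files() call added above is what pulls the delivery files down from S3; its internals are not shown in this diff. As a hedged sketch, under the assumption that it behaves like a plain bucket download driven by the GPF_S3_* environment variables, the equivalent standalone code would look roughly like this (boto3 and the helper name are illustrative, not the toolbelt's actual API):

```python
from os import getenv
from pathlib import Path

import boto3  # assumption: an S3 client comparable to what the toolbelt uses


def download_bucket(bucket_name: str, target_dir: Path) -> None:
    """Download every object of a bucket into target_dir (illustrative only)."""
    s3 = boto3.client(
        "s3",
        endpoint_url=getenv("GPF_S3_URL"),
        aws_access_key_id=getenv("GPF_S3_KEY"),
        aws_secret_access_key=getenv("GPF_S3_SECRETKEY"),
        region_name=getenv("GPF_S3_REGION"),
    )
    target_dir.mkdir(parents=True, exist_ok=True)
    # list_objects_v2 returns at most 1000 keys; enough for a sketch.
    for obj in s3.list_objects_v2(Bucket=bucket_name).get("Contents", []):
        destination = target_dir / obj["Key"]
        destination.parent.mkdir(parents=True, exist_ok=True)
        s3.download_file(bucket_name, obj["Key"], str(destination))
```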
@@ -14,6 +14,7 @@ from pathlib import Path
 from typing import Tuple

 # package
+from gpf_entrepot_toolbelt.orchestrator.models import GpfOrchestratorParameters
 from gpf_entrepot_toolbelt.orchestrator.status import Status
 from gpf_entrepot_toolbelt.utils.check_path import check_path
@@ -124,6 +125,7 @@ def check_md5_file(filename: Path, status: dict, chunksize: int = 8192) -> int:
 def run(
     work_dir: Path,
     parameters: GpfOrchestratorParameters,
+    upload_dir_name: str = "upload",
     chunk_size: int = 8192,
 ) -> Tuple[Status, dict]:
@@ -131,6 +133,8 @@ def run(
     Args:
         work_dir (Path): Input working directory. The folder must exist.
         parameters (GpfOrchestratorParameters): parameters read from input configuration\
             file.
+        upload_dir_name (str, optional): Name (not the path) of the upload directory. \
+            Defaults to "upload".
         chunk_size (int, optional): Size of the chunk-data in octets to load in memory. \
@@ -178,16 +182,16 @@ def run(
         raise FileExistsError(
             f"The upload folder doesn't exist: {upload_dir_path.resolve()}."
         )

-    with os.scandir(upload_dir_path) as it:
-        for entry in it:
-            if entry.name.endswith(".md5") and entry.is_file():
-                logger.debug(f"Processing {entry.name}:")
-                result |= check_md5_file(
-                    filename=Path(upload_dir_path) / entry.name,
-                    status=status,
-                    chunksize=chunk_size,
-                )
+    for upload in parameters.input_uploads:
+        with os.scandir(upload_dir_path / upload._id) as it:
+            for entry in it:
+                if entry.name.endswith(".md5") and entry.is_file():
+                    logger.debug(f"Processing {entry.name}:")
+                    result |= check_md5_file(
+                        filename=Path(upload_dir_path) / upload._id / entry.name,
+                        status=status,
+                        chunksize=chunk_size,
+                    )

     result = result if result < 2 else 2  # the result is capped at 2
     return Status(result), status
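check_md5_file() itself is unchanged by this hunk; for readers of the review, the chunked hashing it relies on is the standard pattern below, which keeps memory usage flat regardless of file size (the function name is illustrative, not the module's actual helper):

```python
import hashlib
from pathlib import Path


def md5_of_file(path: Path, chunk_size: int = 8192) -> str:
    """Compute a file's MD5 by streaming fixed-size chunks (illustrative)."""
    digest = hashlib.md5()
    with path.open("rb") as stream:
        for chunk in iter(lambda: stream.read(chunk_size), b""):
            digest.update(chunk)
    # hexdigest() is lowercase, hence the uppercase-related fixes in the
    # 2.0.1/1.0.1 changelog entries: an expected hash read from a .md5
    # sidecar file must be lowercased before comparison (assumption on the
    # sidecar format, conventionally "<hash>  <filename>" lines).
    return digest.hexdigest()
```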
......
@@ -4,3 +4,4 @@
 pytest-cov>=4,<5
 semver>=2.13,<2.14
 validators>=0.19,<0.21
+moto>=4,<5
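moto enters the test requirements because it fakes the S3 API in-process, letting the new download path be tested without a live MinIO. A minimal sketch with the moto 4.x decorator; bucket and key names are test placeholders, and the `<5` pin matters since moto 5 replaced mock_s3 with mock_aws:

```python
import boto3
from moto import mock_s3  # moto 4.x API


@mock_s3
def test_list_bucket_objects():
    s3 = boto3.client("s3", region_name="eu-west-3")
    # Non-default regions require an explicit LocationConstraint.
    s3.create_bucket(
        Bucket="upload-test-check-md5",
        CreateBucketConfiguration={"LocationConstraint": "eu-west-3"},
    )
    s3.put_object(Bucket="upload-test-check-md5", Key="data.md5", Body=b"...")
    assert s3.list_objects_v2(Bucket="upload-test-check-md5")["KeyCount"] == 1
```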
@@ -2,41 +2,28 @@
     "_id": "1231544456-1546546-164565",
     "job_name": "",
     "pipeline_status": {
-        "string": "string",
+        "job_name1": "SUCCESS",
+        "job_name2": "FAILURE"
     },
     "inputs": {
         "uploads": [{
-            "_id": "",
+            "_id": "invalid_upload",
             "type": "stringEnum(uploadType)",
             "name": "",
             "type_infos": {},
             "srs": "",
-            "extent": {"x1": 0, "x2": 0, "y1": 0, "y2": 0},
+            "extent": {"east": 0, "west": 0, "north": 0, "south": 0},
             "size": 0,
             "storage": {
-                "_id": "string",
-                "name": "string",
-                "type": "stringEnum(storageType)",
-                "type_infos": {}
+                "type": "S3",
+                "type_infos": {
+                    "pot_name": "upload-test-check-md5"
+                }
             }
         }],
-        "stored_datas": [{
-            "_id": "",
-            "type": "stringEnum(storedDataType)",
-            "name": "",
-            "type_infos": {},
-            "srs": "",
-            "extent": {},
-            "size": 0,
-            "storage": {
-                "_id": "string",
-                "name": "string",
-                "type": "stringEnum(storageType)",
-                "type_infos": {}
-            }
-        }]
+        "stored_datas": []
     },
     "parameters": [{
         "name": "",
......
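To tie this fixture back to the code change: the new loop in run() iterates parameters.input_uploads and scans upload_dir_path / upload._id, so each upload's files are expected in a subfolder named after its _id. A hedged sketch of walking the raw JSON the same way (the file name is an assumption):

```python
import json
from pathlib import Path

config = json.loads(Path("parameters.json").read_text(encoding="utf-8"))
for upload in config["inputs"]["uploads"]:
    bucket = upload["storage"]["type_infos"]["pot_name"]
    # Each upload's delivery lands in <work_dir>/<upload_dir_name>/<_id>/.
    print(f"upload {upload['_id']} -> S3 bucket {bucket}")
```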