Here is the documentation: https://geoplateforme.pages.gpf-tech.ign.fr/documentation

Revision comparison for the project geoplateforme/scripts-verification/check-md5 (24 commits on the source branch). Changes are shown as if the source revision were merged into the target.
......@@ -16,7 +16,7 @@ include:
# only cache local items.
variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
PROJECT_FOLDER: "md5_checker"
PROJECT_FOLDER: "src"
# Pip's cache doesn't store the python packages
# https://pip.pypa.io/en/stable/reference/pip_install/#caching
......@@ -82,15 +82,15 @@ test-unit:
before_script:
- python3 -m pip install -U -r requirements/testing.txt
script:
- pytest --junitxml=junit/test-results-unit.xml --cov-report=xml:coverage-reports/coverage-unit.xml
- pytest
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
artifacts:
when: always
reports:
junit: junit/test-results-unit.xml
junit: junit/test-results.xml
coverage_report:
coverage_format: cobertura
path: coverage-reports/coverage-unit.xml
path: coverage.xml
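The `coverage:` regex in this job is what GitLab matches against the job log to extract the coverage percentage from pytest's `--cov-report=term` summary. A minimal sketch checking that regex against a typical summary line (the sample log line is an assumption, not taken from a real run):

```python
# Check that GitLab's coverage regex (from the test-unit job above)
# captures the percentage printed by pytest's terminal summary.
import re

COVERAGE_RE = r"(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$"
log_line = "TOTAL                             120     18    85%"  # assumed sample

match = re.search(COVERAGE_RE, log_line)
assert match is not None and match.group(1) == "85%"
```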
test-documentation:
stage: 🤞 test
......
......@@ -16,6 +16,12 @@ Unreleased
-->
## 0.2.0 - 2022-11-18
- Use argparse to expose a robust CLI
- Make the CLI installable through pip
- Refactor tests and documentation
## 0.1.3 - 2022-11-16
- Documentation: add how to publish and install
......
......@@ -12,7 +12,7 @@ from datetime import datetime
sys.path.insert(0, os.path.abspath(r".."))
# project
from md5_checker import __about__
from src import __about__
# -- Build environment -----------------------------------------------------
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
......@@ -42,6 +42,8 @@ extensions = [
"sphinx.ext.intersphinx",
# 3rd party
"myst_parser",
# "sphinxarg.ext",
"sphinx_argparse_cli",
"sphinx_copybutton",
"sphinxext.opengraph",
]
......@@ -92,18 +94,11 @@ pygments_style = "sphinx"
# Theme
# html_favicon = "static/img/qgis_deployment_toolbelt_logo_200px.png"
# html_logo = "static/img/qgis_deployment_toolbelt_logo_200px.png"
html_theme = "sphinx_book_theme"
# html_theme = "sphinx_book_theme"
html_theme = "furo"
html_theme_options = {
"home_page_in_toc": True,
"path_to_docs": "docs",
"repository_branch": "main",
"repository_url": __about__.__uri_repository__,
"show_toc_level": 3,
"toc_title": "Table des matières",
"use_edit_page_button": True,
# "use_fullscreen_button": False,
"use_issues_button": True,
"use_repository_button": True,
"source_edit_link": f"{__about__.__uri_repository__}"
+ "/-/edit/main/docs/{filename}",
}
......@@ -120,9 +115,9 @@ html_theme_options = {
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {
"**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]
}
# html_sidebars = {
# "**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]
# }
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
......@@ -155,6 +150,8 @@ myst_enable_extensions = [
# replacement variables
myst_substitutions = {
"author": author,
"cli_name": __about__.__executable_name__,
"cli_usage": __about__.__cli_usage__,
"date_update": datetime.now().strftime("%d %B %Y"),
"description": description,
"repo_url": __about__.__uri__,
......@@ -181,7 +178,7 @@ def run_apidoc(_):
cur_dir = os.path.normpath(os.path.dirname(__file__))
output_path = os.path.join(cur_dir, "_apidoc")
modules = os.path.normpath(os.path.join(cur_dir, "../md5_checker/"))
modules = os.path.normpath(os.path.join(cur_dir, "../src/"))
exclusions = ["../input", "../output", "/tests"]
main(["-e", "-f", "-M", "-o", output_path, modules] + exclusions)
......
......@@ -18,7 +18,7 @@ Fill the `.pypirc` file with the credentials.
From here, we use the deploy token:
```rc
```ini
; deploy for project using group deploy token
; see: https://gitlab.gpf-tech.ign.fr/groups/geoplateforme/scripts-verification/-/settings/repository
[gitlab-gpf-prj-dt]
......
# Tests
## Requirements
## Requirements {: tests-requirements }
```sh
pip install -U -r requirements/testing.txt
......
......@@ -13,7 +13,8 @@
caption: Usage
maxdepth: 1
---
installation
usage/installation
usage/cli
```
```{toctree}
......
# Utilisation en ligne de commande
Une fois l'outil installé, il est appelable en ligne de commande : *{{ cli_name }}*.
## Fonctionnement général
{{ cli_usage }}
```{sphinx_argparse_cli}
:module: src.cli
:hook:
:func: main
:prog: gpf-md5-checker
:title: Commandes et options
```
......@@ -21,7 +21,7 @@ Dans les commandes suivantes, remplacer :
```sh
# Windows: py -3
python3 -m pip install -U gpf-md5-checker --index-url https://<personal_access_token_name>:<your_personal_token>/api/v4/groups/55/-/packages/pypi/simple
python3 -m pip install -U gpf-md5-checker --index-url https://<personal_access_token_name>:<your_personal_token>@gitlab.gpf-tech.ign.fr/api/v4/groups/55/-/packages/pypi/simple
```
#### Derrière un proxy
......@@ -36,7 +36,7 @@ Puis ajouter l'URL du proxy en option :
```sh
# Windows: py -3
python3 -m pip install -U gpf-md5-checker --index-url https://<personal_access_token_name>:<your_personal_token>/api/v4/groups/55/-/packages/pypi/simple --proxy socks5://localhost:8645
python3 -m pip install -U gpf-md5-checker --index-url https://<personal_access_token_name>:<your_personal_token>@gitlab.gpf-tech.ign.fr/api/v4/groups/55/-/packages/pypi/simple --proxy socks5://localhost:8645
```
:::{note}
......
# Documentation
# -------------
furo==2022.*
myst-parser[linkify]>=0.14,<0.19
pip-licenses>=3.5,<3.6
sphinx-argparse-cli>=1,<2
sphinx-autobuild==2021.3.14
# sphinx-autodoc-typehints<1.20
sphinx-book-theme<1
sphinx-copybutton<1
sphinxext-opengraph>=0.4,<1
......@@ -54,9 +54,10 @@ use_parentheses = True
addopts =
--junitxml=junit/test-results.xml
--cov-config=setup.cfg
--cov=md5_checker
--cov-report=xml
--cov=src
--cov-report=html
--cov-report=xml
--cov-report=term
--ignore=tests/_wip/
junit_family = xunit2
norecursedirs = .* build dev development dist docs CVS fixtures _darcs {arch} *.egg venv _wip
......
......@@ -13,7 +13,7 @@ from pathlib import Path
from setuptools import find_packages, setup
# package (to get version)
from md5_checker import __about__
from src import __about__
# ############################################################################
# ########### Globals ##############
......@@ -56,10 +56,14 @@ setup(
"Topic :: System :: Installation/Setup",
],
# packaging
py_modules=["md5_checker"],
py_modules=["src"],
packages=find_packages(
exclude=["contrib", "docs", "*.tests", "*.tests.*", "tests.*", "tests", ".venv"]
),
include_package_data=True,
install_requires=requirements,
# cli
entry_points={
"console_scripts": [f"{__about__.__executable_name__} = src.cli:main"]
},
)
......@@ -21,9 +21,9 @@ __all__ = [
__author__ = "Oslandia"
__copyright__ = "2022 - {0}, {1}".format(date.today().year, __author__)
__email__ = "info@oslandia.com"
__executable_name__ = "gpf_md5_checker.exe"
__executable_name__ = "gpf-md5-checker"
__package_name__ = "gpf_md5_checker"
__keywords__ = ["cli, check, IGN, Géoplateforme, profiles"]
__keywords__ = ["cli, check, IGN, Géoplateforme, md5"]
__license__ = "MIT"
__summary__ = (
"Vérifie l'intégrité des données publiées dans la Géoplateforme de "
......@@ -40,10 +40,22 @@ __uri_repository__ = (
__uri_tracker__ = "https://gitlab.gpf-tech.ign.fr/geoplateforme/scripts-verification/check-md5/issues/"
__uri__ = __uri_repository__
__version__ = "0.1.3"
__version__ = "0.2.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
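As a side note, a quick sketch of what the comprehension above yields (the `version_info` helper name is ours, for illustration only):

```python
# What __version_info__ evaluates to: "0.2.0" -> (0, 2, 0); a pre-release
# tag such as "0.2.0-rc1" keeps its non-numeric part as a string.
def version_info(version: str) -> tuple:
    return tuple(
        int(num) if num.isdigit() else num
        for num in version.replace("-", ".", 1).split(".")
    )

assert version_info("0.2.0") == (0, 2, 0)
assert version_info("0.2.0-rc1") == (0, 2, 0, "rc1")
```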
__cli_usage__ = (
"Le programme va vérifier les hash md5 contenus dans le dossier "
"`GPF_WORK_DIR/upload`.\n"
"Le fichier <fichier.md5> peut contenir plusieurs hash.\n"
"Chaque ligne doit être de la forme :\n"
"<hash_md5>  <filename>\n"
"Entre le hash md5 et le fichier se trouvent deux espaces.\n"
"Pour maximiser la compatibilité, l'algorithme va rechercher si chaque ligne "
"contient 32 caractères pour le hash md5 puis du texte.\n"
"Le programme s'occupe de convertir les fins de ligne Windows en Unix."
)
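To make the format above concrete, a minimal sketch of parsing one such line, mirroring the slicing done in `checkMD5File` (sample values assumed; the hash is the one from the `generatemd5sum` docstring):

```python
# One `.md5` line: a 32-character hash, two spaces, then the file name.
# strip() also removes a Windows CRLF line ending.
line = "772ac1a55fab1122f3b369ee9cd31549  md5.txt\r\n".strip()

checksum = line[:32]           # the md5 digest is always 32 hex chars
filename = line[32:].lstrip()  # whatever follows is the file name

assert checksum == "772ac1a55fab1122f3b369ee9cd31549"
assert filename == "md5.txt"
```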
File moved
#! python3 # noqa: E265
"""Main CLI entrypoint.
"""
# standard lib
import argparse
import logging
import sys
from os import getenv
from typing import List, Optional
from src.__about__ import (
__author__,
__cli_usage__,
__summary__,
__title__,
__title_clean__,
__uri_homepage__,
__version__,
)
# package
from src.constants import arg_type_path_folder
from src.md5sum import run
def main(command_line_arguments: Optional[List[str]] = None):
"""Main CLI entrypoint"""
# create the top-level parser
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=f"{__cli_usage__}\n\n"
f"Développé avec \u2764\uFE0F par {__author__}\n"
f"Documentation : {__uri_homepage__}",
description=f"{__title__} {__version__} - {__summary__}",
)
# -- ROOT ARGUMENTS --
# Optional verbosity counter (eg. -v, -vv, -vvv, etc.)
parser.add_argument(
"-v",
"--verbose",
action="count",
default=1,
dest="verbosity",
help="Verbosity level: None = WARNING, -v = INFO, -vv = DEBUG",
)
parser.add_argument(
"--version",
action="version",
version=__version__,
)
parser.add_argument(
"--workdir",
"--work-dir-path",
dest="work_dir_path",
type=arg_type_path_folder,
help="Input working directory. Must exist.",
default=getenv("GPF_WORK_DIR"),
)
parser.add_argument(
"--uploaddir",
"--upload-dir-name",
dest="upload_dir_name",
type=str,
help="Name (not the path) of the upload directory.",
default=getenv("GPF_UPLOAD_DIR", "upload"),
)
parser.add_argument(
"--chunk-size",
dest="chunk_size",
type=int,
help="Input working directory. Must exist.",
default=getenv("GPF_CHUNK_SIZE", 8192),
)
# -- PARSE PASSED ARGUMENTS --
# honour an explicit argument list (e.g. from tests); otherwise read
# sys.argv and force printing help when no argument is passed
args = parser.parse_args(
command_line_arguments
if command_line_arguments is not None
else (None if sys.argv[1:] else ["-h"])
)
# set log level depending on verbosity argument
if 0 < args.verbosity < 4:
args.verbosity = 40 - (10 * args.verbosity)
elif args.verbosity >= 4:
# debug is the limit
args.verbosity = 40 - (10 * 3)
else:
args.verbosity = 0
logging.basicConfig(
level=args.verbosity,
format="%(asctime)s||%(levelname)s||%(module)s||%(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
# get this package's logger (basicConfig already attached a handler)
logger = logging.getLogger(__title_clean__)
logger.debug(f"Log level set: {logging.getLevelName(args.verbosity)}")
# Run
run(
work_dir=args.work_dir_path,
upload_dir_name=args.upload_dir_name,
chunk_size=args.chunk_size,
)
# -- Stand alone execution
if __name__ == "__main__":
sys.exit(main()) # required by unittest
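A hedged example of driving this entrypoint programmatically through the `command_line_arguments` parameter (the work directory is a placeholder path that must exist):

```python
# Equivalent to running `gpf-md5-checker -vv --workdir /data/gpf` from a
# shell; /data/gpf is an assumed placeholder path.
from src.cli import main

main(["-vv", "--workdir", "/data/gpf", "--uploaddir", "upload"])
```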
#! python3 # noqa: E265
"""Static objects and variables."""
# standard lib
from argparse import ArgumentTypeError
from enum import Enum
from pathlib import Path
from typing import Union
class Status(Enum):
"""Output status for GPF orchestrator."""
SUCCESS = 0
FAILURE = 1
ERROR = 2
def arg_type_path_folder(input_path: Union[Path, str]) -> Path:
"""Check an argparse argument type, expecting a valid folder path.
Args:
input_path (Union[Path, str]): path to check as string or pathlib.Path
Raises:
ArgumentTypeError: if the input path is not a valid type or not a folder or
doesn't exist.
Returns:
Path: the input path as a pathlib.Path pointing to an existing folder.
"""
if not isinstance(input_path, (Path, str)):
raise ArgumentTypeError(
TypeError(
"Input path must be a valid path as "
f"pathlib.path or str, not {type(input_path)}."
)
)
if isinstance(input_path, str):
try:
input_path = Path(input_path)
except Exception as exc:
raise ArgumentTypeError(
f"Converting {input_path} into Path failed. Trace: {exc}"
)
# check the path exists
if not input_path.exists():
raise ArgumentTypeError(f"{input_path.resolve()} doesn't exist.")
# check if it's a folder
if not input_path.is_dir():
raise ArgumentTypeError(f"{input_path.resolve()} is not a folder.")
return input_path
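A short sketch of how this validator plugs into argparse as a `type=` callback (standalone; uses the current directory so it runs anywhere):

```python
# arg_type_path_folder as an argparse type: invalid or missing folders
# are reported as clean command-line errors instead of tracebacks.
import argparse
from src.constants import arg_type_path_folder

parser = argparse.ArgumentParser()
parser.add_argument("--workdir", type=arg_type_path_folder)

args = parser.parse_args(["--workdir", "."])  # "." exists and is a folder
print(args.workdir)                           # a pathlib.Path
```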
#! python3 # noqa: E265
"""Main module logic."""
# -- IMPORTS
# standard lib
import hashlib
import json
import logging
import os
import sys
from collections import namedtuple
from enum import Enum
from os import getenv
from pathlib import Path
MD5HASHSIZE = 32
CHUNKSIZE = 8192
# package
from src.constants import Status
# -- GLOBALS
class Status(Enum):
SUCCESS = 0
FAILURE = 1
ERROR = 2
# logs
logger = logging.getLogger(__name__)
# -- FUNCTIONS
def testFilename(filename: str) -> bool:
"""
Simple méthode pour convertir une chaîne en Path
......@@ -34,30 +42,30 @@ def testDirname(dirname: str) -> bool:
return path.is_dir()
def generatemd5sum(filename: str) -> str:
"""
Génère un hash md5 du fichier filename.
def generatemd5sum(filename: str, chunksize: int = 8192) -> str:
"""Génère un hash md5 du fichier filename.
Exemple (pour un fichier contenant le texte "md5")
`echo "md5" > /tmp/md5.txt`
Example:
Exemple (pour un fichier contenant le texte "md5")
`echo "md5" > /tmp/md5.txt`
>>> generatemd5sum("/tmp/md5.txt")
'772ac1a55fab1122f3b369ee9cd31549'
>>> generatemd5sum("/tmp/md5.txt")
'772ac1a55fab1122f3b369ee9cd31549'
Preconditions:
filename est un fichier valide
filename est un fichier valide
Postconditions:
Retourne un hash md5 (chaîne hexadécimale de 32 caractères)
Retourne un hash md5 (chaîne hexadécimale de 32 caractères)
"""
with open(filename, "rb") as f:
file_hash = hashlib.md5()
while chunk := f.read(CHUNKSIZE):
while chunk := f.read(chunksize):
file_hash.update(chunk)
return file_hash.hexdigest()
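A quick sanity check that the chunked reading above yields the same digest as hashing the whole payload at once, whatever chunk size is used (assuming `src.md5sum` is importable and the current directory is writable):

```python
# Chunked hashing equals one-shot hashing, regardless of chunk size.
import hashlib
from pathlib import Path
from src.md5sum import generatemd5sum

payload = b"md5\n" * 10_000
sample = Path("sample.bin")
sample.write_bytes(payload)

assert generatemd5sum(str(sample), chunksize=1024) == hashlib.md5(payload).hexdigest()
sample.unlink()  # clean up the temporary sample file
```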
def validate(filename: str, md5digest: str) -> Status:
def validate(filename: str, md5digest: str, chunksize: int = 8192) -> Status:
"""
Validation du hash md5 `md5digest` du fichier `filename`
......@@ -69,12 +77,14 @@ def validate(filename: str, md5digest: str) -> Status:
Status.SUCCESS autrement
"""
if not testFilename(filename):
print(f"{filename}: TECHNICAL ERROR: le fichier {filename} n'existe pas.")
logger.error(
f"{filename}: TECHNICAL ERROR: le fichier {filename} n'existe pas."
)
return Status.ERROR
result = generatemd5sum(filename) == md5digest
result = generatemd5sum(filename, chunksize) == md5digest
status_return = Status.SUCCESS if result is True else Status.FAILURE
print(f"{filename}: {status_return.name}")
logger.debug(f"{filename}: {status_return.name}")
return status_return
......@@ -90,9 +100,8 @@ def countDictValues(d: dict, val) -> int:
return result
def checkMD5File(filename: str, status: dict) -> int:
"""
Vérifie un fichier *.md5
def checkMD5File(filename: str, status: dict, chunksize: int = 8192) -> int:
"""Vérifie un fichier *.md5.
Ce genre de fichier est classiquement généré par l'utilitaire
md5sum (ou md5 -r) sous unix.
......@@ -115,16 +124,20 @@ def checkMD5File(filename: str, status: dict) -> int:
for line in checksum_file:
line = line.strip()
if len(line) <= MD5HASHSIZE:
if len(line) <= 32:
# TODO: Quel log on utilise pour cela ?
print(f"TECHNICAL ERROR: la ligne {line} n'est pas conforme.")
logger.error(
f"TECHNICAL ERROR: la ligne {line} n'est pas conforme."
)
result |= Status.ERROR.value
continue
checksum = line[:MD5HASHSIZE]
checksum = line[:32]
basename = Path(filename).parent
sourceFilename = str(basename / line[MD5HASHSIZE:].lstrip())
ret = validate(sourceFilename, checksum)
sourceFilename = str(basename / line[32:].lstrip())
ret = validate(
filename=sourceFilename, md5digest=checksum, chunksize=chunksize
)
resultDict = namedtuple("Result", ["filename", "status"])
result |= ret.value
resultList.append(resultDict(sourceFilename, ret.name))
......@@ -132,28 +145,18 @@ def checkMD5File(filename: str, status: dict) -> int:
except EnvironmentError as e:
# TODO: gestion erreurs si on ne peut pas :
# où que ça va ?
print(f"TECHNICAL ERROR: {e}")
logger.error(f"TECHNICAL ERROR: {e}")
exit(os.EX_IOERR)
return result
def usage():
print(f"{sys.argv[0]}")
print("Le programme va vérifier les hash md5 contenu dans le dossier $UPLOAD_DIR")
print("Le fichier <fichier.md5> peut contenir plusieurs hash.")
print("Chaque ligne doit être de la forme:")
print("<hash_md5> <filename>")
print("Entre le hash md5 et le fichier se trouve deux espaces.")
print(
"Pour maximiser la compatibilité, l'algorithme va rechercher si le fichier contient"
)
print("32 caractères pour le hash md5 puis du texte.")
print("Le programme s'occupe de convertir les fins de fichier windows en unix.")
def main():
"""
def run(
work_dir: Path = None,
upload_dir_name: str = "upload",
chunk_size: int = 8192,
) -> int:
"""Main function running the logic.
Returns:
0 indique un SUCCESS
......@@ -161,50 +164,77 @@ def main():
2 pour indiquer qu'il y a eu au moins une erreur technique
3 pour indiquer qu'il y a eu au moins une fois les deux erreurs
"""
upDir = os.getenv("UPLOAD_DIR")
workDir = os.getenv("WORK_DIR")
result = 0
# checks
if not isinstance(work_dir, Path):
raise TypeError(
f"work_dir must be a pathlib.Path instance, not {type(work_dir)} ({work_dir})."
)
if not work_dir.is_dir():
raise ValueError(
f"work_dir must be a valid folder path. {work_dir.resolve()} is not."
)
if not upDir or not workDir:
print("La variable UPLOAD_DIR ou WORK_DIR n'a pas été définie.")
usage()
exit(os.EX_NOINPUT)
if not work_dir.exists():
raise FileExistsError(f"work_dir folder doesn't exist: {work_dir.resolve()}.")
if not testDirname(upDir) or not testDirname(workDir):
# TODO: Quel log on utilise pour cela ?
print(f"{upDir} ou {workDir} n'existe pas ou n'est pas un dossier.")
usage()
exit(os.EX_NOINPUT)
if not isinstance(upload_dir_name, str):
raise TypeError(f"work_dir must be a str instance, not {type(work_dir)}.")
# variables
upload_dir_path: Path = work_dir / upload_dir_name
result: int = 0
status = dict()
with os.scandir(upDir) as it:
# check upload
if not upload_dir_path.is_dir():
raise ValueError(
f"The upload folder must be a valid folder path. {upload_dir_path.resolve()} is not."
)
if not upload_dir_path.exists():
raise FileNotFoundError(
f"The upload folder doesn't exist: {upload_dir_path.resolve()}."
)
with os.scandir(upload_dir_path) as it:
for entry in it:
if entry.name.endswith(".md5") and entry.is_file():
print(f"Traitement de {entry.name} :")
result |= checkMD5File(Path(upDir) / entry.name, status)
logger.debug(f"Traitement de {entry.name} :")
result |= checkMD5File(
filename=Path(upload_dir_path) / entry.name,
status=status,
chunksize=chunk_size,
)
# TODO: Où exporte-t-on le fichier ?
try:
with open(Path(workDir, "output.json"), "w") as outfile:
with open(Path(work_dir, "output.json"), "w") as outfile:
json.dump(status, outfile, indent="\t", sort_keys=True)
outfile.write("\n")
except EnvironmentError as e:
print(f"TECHNICAL ERROR: {e}")
print("Impossible d'enregistrer le résultat.")
exit(os.EX_IOERR)
except EnvironmentError as exc:
logger.error(f"Impossible d'enregistrer le résultat. TECHNICAL ERROR: {exc}")
raise exc
success = countDictValues(status, Status.SUCCESS.name)
failure = countDictValues(status, Status.FAILURE.name)
error = countDictValues(status, Status.ERROR.name)
print(f"Testés : {success + failure + error}")
print(f"SUCCESS : {success}")
print(f"FAILURE : {failure}")
print(f"ERROR : {error}")
logger.debug(f"Testés : {success + failure + error}")
logger.debug(f"SUCCESS : {success}")
logger.debug(f"FAILURE : {failure}")
logger.debug(f"ERROR : {error}")
return result
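Since each file's status is OR-ed into `result`, the exit codes compose bitwise, which is what the docstring's values 0 to 3 encode. A minimal illustration (assuming `src.constants` is importable):

```python
# Exit codes compose bitwise: SUCCESS=0, FAILURE=1, ERROR=2; a run with
# at least one failed hash and one technical error therefore returns 3.
from src.constants import Status

result = 0
result |= Status.FAILURE.value  # one bad checksum
result |= Status.ERROR.value    # one missing file
assert result == 3
```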
# -- Stand alone execution
if __name__ == "__main__":
main()
print(
run(
work_dir=getenv("GPF_WORK_DIR", Path("./tests")),
upload_dir_name=getenv("GPF_UPLOAD_DIR", "assets"),
chunk_size=int(getenv("GPF_CHUNK_SIZE", 8192)),
)
)
......@@ -18,7 +18,7 @@ from semver import VersionInfo
from validators import url
# project
from md5_checker import __about__
from src import __about__
# ############################################################################
# ########## Classes #############
......
......@@ -5,24 +5,24 @@
.. code-block:: bash
# for whole tests
python -m unittest tests.test_about
python -m unittest tests.test_md5_checker
# for specific test
python -m unittest tests.test_about.TestAbout.test_version_semver
python -m unittest tests.test_md5_checker.TestMD5.test_script_run_ko
"""
import os
import stat
import tempfile
# standard library
import os
import tempfile
import unittest
from collections import namedtuple
from pathlib import Path
# 3rd party
import pytest
# project
from md5_checker import md5sum
from src import __about__, md5sum
# ############################################################################
# ########## Classes #############
......@@ -32,6 +32,13 @@ from md5_checker import md5sum
class TestMD5(unittest.TestCase):
"""Test package metadata."""
# -- Standard methods --------------------------------------------------------
@classmethod
def setUpClass(cls):
"""Executed when module is loaded before any test."""
pass
# -- TESTS ---------------------------------------------------------
def test_filename(self):
"""Test is_filename."""
self.assertTrue(
......@@ -197,56 +204,51 @@ class TestMD5(unittest.TestCase):
self.assertTrue(status, ret_status)
print(ret)
# Technical error: file not found
status = dict()
with pytest.raises(SystemExit) as pytest_wrapped_e:
md5sum.checkMD5File(
Path(Path(__file__).parent, "assets/failed_all"), status
)
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 74 # EX_IOERR
def test_script_run_ok(self):
"""test main script run."""
def test_main(self):
"""main test."""
ret = md5sum.run(work_dir=Path("./tests"), upload_dir_name="assets")
# no variable UPLOAD_DIR
with pytest.raises(SystemExit) as pytest_wrapped_e:
md5sum.main()
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 66 # EX_NOINPUT
os.environ["UPLOAD_DIR"] = str(Path(Path(__file__).parent, "assets"))
"""Expected log out:
Testés : 10
SUCCESS : 8
FAILURE : 1
ERROR : 1
"""
self.assertEqual(ret, 3)
# no variable WORK_DIR
with pytest.raises(SystemExit) as pytest_wrapped_e:
md5sum.main()
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 66 # EX_NOINPUT
def test_script_run_ko(self):
"""Test case where main script should raise an error."""
# -- Work dir
# invalide WORK_DIR
os.environ["WORK_DIR"] = str(Path(Path(__file__).parent, "dhsfpiauz"))
with pytest.raises(SystemExit) as pytest_wrapped_e:
md5sum.main()
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 66 # EX_NOINPUT
# work_dir is not a Path instance
with self.assertRaises(TypeError):
md5sum.run()
with self.assertRaises(TypeError):
md5sum.run(work_dir="./tests/assets")
# invalid WORK_DIR
# works only locally?
# with tempfile.TemporaryDirectory() as tmpdirname:
# os.chmod(tmpdirname, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
# os.environ["WORK_DIR"] = str(tmpdirname)
# with pytest.raises(SystemExit) as pytest_wrapped_e:
# md5sum.main()
# assert pytest_wrapped_e.type == SystemExit
# assert pytest_wrapped_e.value.code == 74 # EX_IOERR
# work_dir is not a valid folder path
with self.assertRaises(ValueError):
md5sum.run(work_dir=Path("/imaginary/folder/path/"))
# -- Upload dir name
# upload_dir_name is not a str instance
with self.assertRaises(TypeError):
md5sum.run(upload_dir_name=Path("./tests/assets"))
# work_dir is not a valid folder path
with self.assertRaises(ValueError):
md5sum.run(work_dir=Path("./tests/"), upload_dir_name="hohoho")
# Technical error: file not found
status = dict()
with self.assertRaises(SystemExit) as exc:
md5sum.checkMD5File(
Path(Path(__file__).parent, "assets/failed_all"), status
)
with tempfile.TemporaryDirectory() as tmpdirname:
os.environ["WORK_DIR"] = str(tmpdirname)
ret = md5sum.main()
# Testés : 10
# SUCCESS : 8
# FAILURE : 1
# ERROR : 1
self.assertEqual(ret, 3)
self.assertEqual(exc.exception.code, 74) # EX_IOERR
# ############################################################################
......