__pycache__
counter
exports
imports
notes.md
output
run.sh
tmp
- one folder = one page
- all files in a folder become attachments of that folder's page
- if a file is an .odt, a page is also created from the .odt content, with the original .odt file kept as an attachment (see the illustrative CSV rows after this list)
- install Batch Import 2.3
- then tick "advanced", copy/paste `com.xwiki.projects.ecolo:ecolo-subwiki` and pick version 1.8
- the CSV upload is done via https://www.xwiki2.localhost/bin/view/Doc/?sheet=BatchImport.BatchImportSheet
- use the existing DocClass (the AppWithinMinutes one from Ludovic? do not create a new one)
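
For illustration, here is roughly what the two CSVs generated by the exporter script further down would contain, assuming a hypothetical tree `imports/ProjectA/report.odt` and made-up arguments `--documentspace Main.Import`, `--documentspacetitle Imports` and `--pilot Admin`:

```
export-pages.csv:

documentSpace,documentName,documentTitle,attachmentsFolderPath,documentContent,pilot,types,thematiques
Main.Import,WebHome,Imports,imports,{{children/}},Admin,,
Main.Import.ProjectA,WebHome,ProjectA,ProjectA,{{children/}},Admin,,

export-odts-pages.csv:

documentSpace,documentName,documentTitle,attachmentsFolderPath,documentContent,pilot,types,thematiques
Main.Import.ProjectA.report,WebHome,report,ProjectA/report.odt,,Admin,,
```

The `types` and `thematiques` columns are always emitted empty, and `{{children/}}` is the content placed on every folder page so it simply lists its children.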
# Meeting notes

1. test the `sheet` parameter (and others)
2. => overrides the page's rendering
3. if the `sheet` parameter is set
   - effect:
   - if it uses a sheet -> the one given as the matrix parameter

- [X] add the GitLab config in create.sh
- [N] CLI installation -> email Ludovic, otherwise via the GUI (see above)
  - not possible, too long (and not useful either)
# Entry point: checks the import directory, rebuilds the export directory,
# then writes the two CSV files.
import sys

from lib.create_import import create_import
from lib.create_odts_import import create_odts_import
from lib.get_config import get_config
from lib.helpers import (
    create_export_directory,
    is_import_directory_okay,
)


def main():
    try:
        CONFIG = get_config()
        # Abort early if the import directory is missing or not a directory.
        is_import_directory_okay(CONFIG["absolute_starting_path"])
        create_export_directory(CONFIG)
        create_import(CONFIG)
        create_odts_import(CONFIG)
    except ValueError as ve:
        # Exit with the error message and a non-zero status.
        sys.exit(ve)


if __name__ == "__main__":
    main()
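
For reference, assuming the entry point above lives in a file called `main.py` (the actual filename is not shown here; `run.sh` presumably wraps it), an invocation using the flags defined in `lib/get_args.py` would look like `python main.py --importdirectory imports --documentspace Main.Import --documentspacetitle Imports --exportdirectory exports --pilot Admin`, where every value other than the flag names is illustrative.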
# lib/create_import.py: one CSV row (an XWiki page) per walked folder, plus a
# copy of every file into that folder's attachments directory.
import os
import shutil

from lib.helpers import (
    get_attachments_folder_path,
    get_document_space,
    get_document_space_title,
    write_csv,
)


def create_import(CONFIG):
    rows = []
    for current_path, _folders, files in os.walk(CONFIG["absolute_starting_path"]):
        document_space = get_document_space(CONFIG, current_path)
        document_space_title = get_document_space_title(CONFIG, current_path)
        attachments_folder_path = get_attachments_folder_path(CONFIG, current_path)
        rows.append(
            {
                "documentSpace": document_space,
                "documentName": "WebHome",
                "documentTitle": document_space_title,
                "attachmentsFolderPath": attachments_folder_path,
                # Every folder page simply lists its children.
                "documentContent": "{{children/}}",
                "pilot": CONFIG["pilot"],
            }
        )
        # Mirror the folder inside the export directory and copy its files
        # there, so they can be uploaded as attachments.
        path = os.path.join(CONFIG["export_directory"], attachments_folder_path)
        os.mkdir(path)
        for file in files:
            shutil.copy(os.path.join(current_path, file), path)
    write_csv(
        os.path.join(CONFIG["export_directory"], "export-pages.csv"),
        CONFIG["fields"],
        rows,
    )
# lib/create_odts_import.py: one CSV row (an XWiki page) per .odt file found
# in the import tree; the page content will come from the .odt itself.
import os
from pathlib import Path

from lib.helpers import (
    get_attachments_folder_path,
    get_document_space,
    write_csv,
)


def create_odts_import(CONFIG):
    rows = []
    for current_path, _folders, files in os.walk(CONFIG["absolute_starting_path"]):
        # Keep only .odt files, whatever the case of the extension.
        odts = [f for f in files if os.path.splitext(f)[-1].lower() == ".odt"]
        if not odts:
            continue
        document_space = get_document_space(CONFIG, current_path)
        attachments_folder_path = get_attachments_folder_path(CONFIG, current_path)
        for odtf in odts:
            stem = Path(odtf).stem  # file name without its extension
            rows.append(
                {
                    # Nest the .odt page under its folder's space; dots in the
                    # file name would create extra levels, so turn them into "_".
                    "documentSpace": f"{document_space}.{stem.replace('.', '_')}",
                    "documentName": "WebHome",
                    "documentTitle": stem,
                    "attachmentsFolderPath": f"{attachments_folder_path}/{odtf}",
                    "pilot": CONFIG["pilot"],
                }
            )
    write_csv(
        os.path.join(CONFIG["export_directory"], "export-odts-pages.csv"),
        CONFIG["fields"],
        rows,
    )
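
Putting the two scripts above together, and reusing the hypothetical `imports/ProjectA/report.odt` tree from the notes section, the export directory would end up looking roughly like this (names depend entirely on the actual arguments and input tree):

```
exports/
├── imports/            # attachments of the root page (top-level files, if any)
├── ProjectA/
│   └── report.odt
├── export-pages.csv
└── export-odts-pages.csv
```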
# lib/get_args.py: command-line interface of the exporter.
import argparse


def get_arguments():
    parser = argparse.ArgumentParser(
        allow_abbrev=False,
        description="Build an XWiki export CSV from a given directory.",
        epilog="alpha stage",
        prog="xwiki-exporter",
    )
    parser.add_argument(
        "--importdirectory",
        help="import root directory",
        required=True,
        type=str,
    )
    parser.add_argument(
        "--documentspace",
        help="XWiki document space",
        required=True,
        type=str,
    )
    parser.add_argument(
        "--documentspacetitle",
        help="XWiki document space title",
        required=True,
        type=str,
    )
    parser.add_argument(
        "--exportdirectory",
        help="export directory, will be created if not present",
        required=True,
        type=str,
    )
    # Declared but not read by lib/get_config.py at the moment.
    parser.add_argument(
        "--output",
        help="output CSV filename",
        required=False,
        type=str,
    )
    parser.add_argument(
        "--pilot",
        help="XWiki pilot name",
        required=True,
        type=str,
    )
    return parser.parse_args()
# lib/get_config.py: turns the CLI arguments into a single CONFIG dict.
import os

from lib.get_args import get_arguments


def get_config():
    ARGS = get_arguments()
    # Column order of the generated CSV files; "types" and "thematiques" are
    # written as empty columns (nothing in this script fills them in).
    FIELDS = [
        "documentSpace",
        "documentName",
        "documentTitle",
        "attachmentsFolderPath",
        "documentContent",
        "pilot",
        "types",
        "thematiques",
    ]
    return {
        "absolute_starting_path": os.path.abspath(
            os.path.normpath(ARGS.importdirectory)
        ),
        "document_space": ARGS.documentspace,
        "document_space_title": ARGS.documentspacetitle,
        "export_directory": ARGS.exportdirectory,
        "pilot": ARGS.pilot,
        "starting_path": os.path.normpath(ARGS.importdirectory),
        "fields": FIELDS,
    }
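
As a quick illustration (argument values invented), running with `--importdirectory ./imports` from `/home/user/work` would give `absolute_starting_path = "/home/user/work/imports"` and `starting_path = "imports"`, while the other keys simply carry the corresponding arguments through unchanged.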
# lib/helpers.py: shared helpers (CSV writing, zipping, paths, export directory).
import csv
import os
import shutil
import zipfile
from datetime import datetime


def get_humane_date(timestamp, date_format):
    # Format a UNIX timestamp with the given strftime format.
    return datetime.fromtimestamp(timestamp).strftime(date_format)


def write_csv(csv_path, FIELD_NAMES, rows):
    # Missing keys in a row are written as empty cells by DictWriter.
    with open(csv_path, "w", encoding="UTF8", newline="") as file:
        writer = csv.DictWriter(file, fieldnames=FIELD_NAMES)
        writer.writeheader()
        writer.writerows(rows)


def package_zip(zip_path, absolute_starting_path, zip_root_path, csv_path):
    # Zip the import tree, then append the CSV at the root of the archive.
    shutil.make_archive(
        zip_path.replace(".zip", ""),
        "zip",
        absolute_starting_path.replace(zip_root_path, ""),
        zip_root_path,
    )
    with zipfile.ZipFile(zip_path, "a") as archive:
        archive.write(csv_path, os.path.basename(csv_path))


def create_export_directory(CONFIG):
    # Start from a clean slate: drop any previous export, then recreate it.
    if os.path.isdir(CONFIG["export_directory"]):
        shutil.rmtree(CONFIG["export_directory"])
    os.makedirs(CONFIG["export_directory"])


def is_import_directory_okay(path):
    if not os.path.exists(path):
        raise ValueError("aborting: import directory path does not exist")
    if not os.path.isdir(path):
        raise ValueError("aborting: import directory path is not a directory")
    return True


def get_document_space(CONFIG, current_path):
    # Map a folder to its XWiki space: the root maps to the configured space,
    # sub-folders are nested with "." and dots in folder names become "_".
    relative_path = os.path.relpath(current_path, CONFIG["absolute_starting_path"])
    if relative_path == ".":
        document_space = CONFIG["document_space"]
    else:
        path = relative_path.replace(".", "_").replace("/", ".")
        document_space = f"{CONFIG['document_space']}.{path}"
    return document_space


def get_attachments_folder_path(CONFIG, current_path):
    # Name of the folder (inside the export directory) holding the attachments.
    relative_path = os.path.relpath(current_path, CONFIG["absolute_starting_path"])
    if relative_path == ".":
        attachments_folder_path = os.path.basename(CONFIG["absolute_starting_path"])
    else:
        attachments_folder_path = relative_path.replace("/", ".")
    return attachments_folder_path


def get_document_space_title(CONFIG, current_path):
    # The root page gets the configured title, sub-pages use the folder name.
    relative_path = os.path.relpath(current_path, CONFIG["absolute_starting_path"])
    if relative_path == ".":
        document_space_title = CONFIG["document_space_title"]
    else:
        document_space_title = os.path.basename(current_path)
    return document_space_title
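
To make the path mapping above concrete, here is a small sketch of what the helpers return; the CONFIG values and paths are invented, and the expected results in the comments follow directly from the code above (run it from the repository root so that `lib` is importable):

```python
from lib.helpers import (
    get_attachments_folder_path,
    get_document_space,
    get_document_space_title,
)

# Hypothetical configuration, mirroring the keys get_config() would build.
CONFIG = {
    "absolute_starting_path": "/data/imports",
    "document_space": "Main.Import",
    "document_space_title": "Imported documents",
}

print(get_document_space(CONFIG, "/data/imports"))                        # Main.Import
print(get_document_space(CONFIG, "/data/imports/ProjectA/v1.2"))          # Main.Import.ProjectA.v1_2
print(get_attachments_folder_path(CONFIG, "/data/imports"))               # imports
print(get_attachments_folder_path(CONFIG, "/data/imports/ProjectA/v1.2")) # ProjectA.v1.2
print(get_document_space_title(CONFIG, "/data/imports/ProjectA"))         # ProjectA
```

Note the asymmetry: dots in folder names become underscores in the document space, but are kept as-is in the attachments folder path.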