#!/usr/bin/python
"""Update sledrun wikitext pages from their JSON description pages.

For every page in the wiki category "Rodelbahn" whose ``<title>/Rodelbahn.json``
subpage is newer than the wikitext page (or for every page when ``--all`` is
given), the wikitext is regenerated from the JSON data, a colored diff is shown,
and the user is asked interactively whether to save the change.
"""
import argparse
import json
import sys
from difflib import SequenceMatcher
from enum import Enum, auto
from typing import List, Optional

from termcolor import cprint  # python3-termcolor
import jsonschema

from wrpylib.mwapi import WikiSite
from wrpylib.wrmwmarkup import create_sledrun_wiki


def _format_range_unified(start: int, stop: int) -> str:
    """Copied from difflib._format_range_unified"""
    beginning = start + 1  # lines start numbering with one
    length = stop - start
    if length == 1:
        return str(beginning)
    if not length:
        beginning -= 1  # empty ranges begin at line just before the range
    return f'{beginning},{length}'


def unified_diff(a: str, b: str, context: int = 3) -> None:
    """Print a colored unified diff between strings ``a`` and ``b`` to stdout.

    :param a: "old" text (lines printed with '-' in red when removed).
    :param b: "new" text (lines printed with '+' in green when added).
    :param context: number of unchanged context lines around each hunk.
    """
    a_lines = a.splitlines()
    b_lines = b.splitlines()
    for group in SequenceMatcher(None, a_lines, b_lines).get_grouped_opcodes(context):
        first, last = group[0], group[-1]
        file1_range = _format_range_unified(first[1], last[2])
        file2_range = _format_range_unified(first[3], last[4])
        cprint(f'@@ -{file1_range} +{file2_range} @@', 'magenta')
        for tag, i1, i2, j1, j2 in group:
            if tag == 'equal':
                for line in a_lines[i1:i2]:
                    print(f' {line}')
                continue
            if tag in {'replace', 'delete'}:
                for line in a_lines[i1:i2]:
                    cprint(f'- {line}', 'red')
            if tag in {'replace', 'insert'}:
                for line in b_lines[j1:j2]:
                    cprint(f'+ {line}', 'green')


class Choice(Enum):
    """Possible user answers to a yes/no/quit prompt."""
    yes = auto()
    no = auto()
    quit = auto()


def input_yes_no_quit(text: str, default: Optional[Choice]) -> Choice:
    """Prompt the user until a yes/no/quit answer is given.

    :param text: prompt text passed to :func:`input`.
    :param default: returned on empty input; with ``None`` an empty input
        re-prompts instead.
    :return: the recognized :class:`Choice`.
    """
    while True:
        result = input(text)
        if result in ['Y', 'y', 'yes']:
            return Choice.yes
        elif result in ['N', 'n', 'no']:
            return Choice.no
        elif result in ['Q', 'q', 'quit']:
            return Choice.quit
        elif result == '' and default is not None:
            return default
        cprint(f'Unrecognized input: "{result}"', 'red')


def update_sledrun(site: WikiSite, wiki_page: dict, json_page: dict, map_page: dict, impression_page: dict) -> None:
    """Regenerate one sledrun wikitext page from its JSON/map/impression pages.

    Validates the JSON against the site's sledrun schema, renders new wikitext,
    shows a diff, and saves only after interactive confirmation. Calls
    :func:`sys.exit` when the user answers 'quit'.

    :param site: wiki API wrapper used for schema lookup, token and edit.
    :param wiki_page: revisions query result of the wikitext page (with content).
    :param json_page: revisions query result of the Rodelbahn.json subpage.
    :param map_page: revisions query result of the Landkarte.json subpage;
        may contain 'missing'.
    :param impression_page: revisions query result of the Impressionen subpage;
        may contain 'missing'.
    """
    json_page_main_slot = json_page['revisions'][0]['slots']['main']
    assert json_page_main_slot['contentmodel'] == 'json'
    sledrun_json = json.loads(json_page_main_slot['content'])
    jsonschema.validate(instance=sledrun_json, schema=site.sledrun_schema())
    if 'missing' in map_page:
        map_json = None  # no map subpage exists for this sledrun
    else:
        map_page_main_slot = map_page['revisions'][0]['slots']['main']
        assert map_page_main_slot['contentmodel'] == 'json'
        map_json = json.loads(map_page_main_slot['content'])
    if 'missing' in impression_page:
        impression_title = None
    else:
        impression_title = impression_page['title']
    new_text = create_sledrun_wiki(sledrun_json, map_json, impression_title).strip()
    previous_text = wiki_page['revisions'][0]['slots']['main']['content'].strip()
    if new_text == previous_text:
        return  # nothing to do - rendered text is already up to date
    cprint(wiki_page['title'], 'green')
    unified_diff(previous_text, new_text)
    choice = input_yes_no_quit('Do you accept the changes [yes, no, quit]? ', None)
    if choice == Choice.no:
        return
    if choice == Choice.quit:
        sys.exit(0)
    site(
        'edit',
        pageid=wiki_page['pageid'],
        text=new_text,
        summary='Rodelbahnbeschreibung aus JSON Daten aktualisiert.',
        minor=1,
        bot=1,
        baserevid=wiki_page['revisions'][0]['revid'],  # guard against edit conflicts
        nocreate=1,
        token=site.token(),
    )


def update_sledrun_wikitext(ini_files: List[str], update_all: bool) -> None:
    """Update outdated sledrun wikitext pages from their JSON data.

    :param ini_files: configuration .ini files passed to :class:`WikiSite`.
    :param update_all: when True, regenerate every sledrun that has a JSON
        subpage regardless of modification dates; when False, only those whose
        JSON subpage is newer than the wikitext page.
    """
    site = WikiSite(ini_files)
    cm_limit = 5  # 'max'  # NOTE(review): looks like a debug limit; 'max' would process all pages - confirm
    for wikitext_result in site.query(list='categorymembers', cmtitle='Kategorie:Rodelbahn', cmlimit=cm_limit):
        wikitext_title_list = [page["title"] for page in wikitext_result['categorymembers']]
        json_title_list = [f'{title}/Rodelbahn.json' for title in wikitext_title_list]
        # First pass: timestamps only, to find pages that need an update.
        wikitext_result = query_revisions(site, wikitext_title_list, [])
        json_result = query_revisions(site, json_title_list, [])
        update_wikitext_title_list = []
        update_json_title_list = []
        for wikitext_page, json_page in zip(wikitext_result, json_result):
            assert wikitext_page['title'] + '/Rodelbahn.json' == json_page['title']
            if 'missing' in json_page:
                continue  # no JSON data to generate wikitext from
            # Bug fix: honor --all; previously update_all was accepted but ignored,
            # so --all had no effect and only out-of-date pages were processed.
            if update_all or wikitext_page['revisions'][0]['timestamp'] < json_page['revisions'][0]['timestamp']:
                update_wikitext_title_list.append(wikitext_page['title'])
                update_json_title_list.append(json_page['title'])
        if len(update_wikitext_title_list) == 0:
            continue
        # Second pass: fetch full content only for the pages that changed.
        update_map_title_list = [f'{title}/Landkarte.json' for title in update_wikitext_title_list]
        update_impression_title_list = [f'{title}/Impressionen' for title in update_wikitext_title_list]
        update_wikitext_result = query_revisions(site, update_wikitext_title_list, ['content'])
        update_json_result = query_revisions(site, update_json_title_list, ['content'])
        update_map_result = query_revisions(site, update_map_title_list, ['content'])
        update_impression_result = query_revisions(site, update_impression_title_list, ['content'])
        for wikitext_page, json_page, map_page, impression_page in \
                zip(update_wikitext_result, update_json_result, update_map_result, update_impression_result):
            assert wikitext_page['title'] + '/Rodelbahn.json' == json_page['title']
            update_sledrun(site, wikitext_page, json_page, map_page, impression_page)


def query_revisions(site: WikiSite, title_list: List[str], extra_rv_prop: List[str]) -> List[dict]:
    """Query revision info for the given titles, in the order of ``title_list``.

    :param site: wiki API wrapper.
    :param title_list: page titles to query.
    :param extra_rv_prop: rvprop values in addition to 'timestamp' and 'ids'
        (e.g. ['content']).
    :return: one page dict per title, sorted to match ``title_list``.
    """
    rv_prop = ['timestamp', 'ids'] + extra_rv_prop
    pages = next(site.query(prop='revisions', titles=title_list, rvslots='*', rvprop=rv_prop))['pages']
    # The API does not guarantee result order; restore the request order.
    pages = sorted(pages, key=lambda p: title_list.index(p['title']))
    assert len(title_list) == len(pages)
    return pages


def main() -> None:
    """Parse command line arguments and run the update."""
    parser = argparse.ArgumentParser(description='Update sledrun wikitext from JSON')
    parser.add_argument('--all', action='store_true',
                        help='update all sledruns regardless of modification date differences')
    parser.add_argument('inifile', nargs='+',
                        help='inifile.ini, see: https://www.winterrodeln.org/trac/wiki/ConfigIni')
    args = parser.parse_args()
    update_sledrun_wikitext(args.inifile, args.all)


if __name__ == '__main__':
    main()