Implement XAP style merge semantics for DD keycodes (#19397)
parent 8c09170fff
commit 24adecd922
@@ -1,12 +1,13 @@
 """Functions that help us generate and use info.json files.
 """
 import json
-from collections.abc import Mapping
-from functools import lru_cache
-from pathlib import Path
-
 import hjson
 import jsonschema
+from collections.abc import Mapping
+from functools import lru_cache
+from typing import OrderedDict
+from pathlib import Path
 
 from milc import cli
@@ -101,3 +102,34 @@ def deep_update(origdict, newdict):
             origdict[key] = value
 
     return origdict
+
+
+def merge_ordered_dicts(dicts):
+    """Merges nested OrderedDict objects resulting from reading a hjson file.
+    Later input dicts overrides earlier dicts for plain values.
+    Arrays will be appended. If the first entry of an array is "!reset!", the contents of the array will be cleared and replaced with RHS.
+    Dictionaries will be recursively merged. If any entry is "!reset!", the contents of the dictionary will be cleared and replaced with RHS.
+    """
+    result = OrderedDict()
+
+    def add_entry(target, k, v):
+        if k in target and isinstance(v, (OrderedDict, dict)):
+            if "!reset!" in v:
+                target[k] = v
+            else:
+                target[k] = merge_ordered_dicts([target[k], v])
+            if "!reset!" in target[k]:
+                del target[k]["!reset!"]
+        elif k in target and isinstance(v, list):
+            if v[0] == '!reset!':
+                target[k] = v[1:]
+            else:
+                target[k] = target[k] + v
+        else:
+            target[k] = v
+
+    for d in dicts:
+        for (k, v) in d.items():
+            add_entry(result, k, v)
+
+    return result
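
A minimal sketch of how the new merge_ordered_dicts() behaves, using plain dicts in place of the OrderedDicts that json_load() actually returns; the keycode entry and alias names below are invented for illustration:

    base = {'keycodes': {'0x0041': {'key': 'KC_A', 'aliases': ['KC_OLD_ALIAS']}}}
    override = {'keycodes': {'0x0041': {'aliases': ['!reset!', 'KC_NEW_ALIAS']}}}

    merged = merge_ordered_dicts([base, override])

    # Plain values from later dicts win, and a list whose first entry is "!reset!"
    # replaces the earlier list instead of appending to it:
    #   merged['keycodes']['0x0041'] == {'key': 'KC_A', 'aliases': ['KC_NEW_ALIAS']}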
@@ -1,6 +1,6 @@
 from pathlib import Path
 
-from qmk.json_schema import deep_update, json_load, validate
+from qmk.json_schema import merge_ordered_dicts, deep_update, json_load, validate
 
 CONSTANTS_PATH = Path('data/constants/')
 KEYCODES_PATH = CONSTANTS_PATH / 'keycodes'
@@ -16,20 +16,13 @@ def _find_versions(path, prefix):
     return ret
 
 
-def _load_fragments(path, prefix, version):
-    file = path / f'{prefix}_{version}.hjson'
-    if not file.exists():
-        raise ValueError(f'Requested keycode spec ({prefix}:{version}) is invalid!')
-
-    # Load base
-    spec = json_load(file)
-
-    # Merge in fragments
-    fragments = path.glob(f'{prefix}_{version}_*.hjson')
-    for file in fragments:
-        deep_update(spec, json_load(file))
-
-    return spec
+def _potential_search_versions(version, lang=None):
+    versions = list_versions(lang)
+    versions.reverse()
+
+    loc = versions.index(version) + 1
+
+    return versions[:loc]
 
 
 def _search_path(lang=None):
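
A small worked example of what the new helper returns, assuming list_versions() yields the published spec versions newest-first (the version strings are illustrative):

    versions = ['0.0.3', '0.0.2', '0.0.1']   # hypothetical list_versions(lang) result
    versions.reverse()                        # ['0.0.1', '0.0.2', '0.0.3']
    loc = versions.index('0.0.2') + 1
    print(versions[:loc])                     # ['0.0.1', '0.0.2']

Every version up to and including the requested one is returned, oldest first, so older base files can be layered underneath the requested spec.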
@@ -40,6 +33,34 @@ def _search_prefix(lang=None):
     return f'keycodes_{lang}' if lang else 'keycodes'
 
 
+def _locate_files(path, prefix, versions):
+    # collate files by fragment "type"
+    files = {'_': []}
+    for version in versions:
+        files['_'].append(path / f'{prefix}_{version}.hjson')
+
+        for file in path.glob(f'{prefix}_{version}_*.hjson'):
+            fragment = file.stem.replace(f'{prefix}_{version}_', '')
+            if fragment not in files:
+                files[fragment] = []
+            files[fragment].append(file)
+
+    return files
+
+
+def _process_files(files):
+    # allow override within types of fragments - but not globally
+    spec = {}
+    for category in files.values():
+        specs = []
+        for file in category:
+            specs.append(json_load(file))
+
+        deep_update(spec, merge_ordered_dicts(specs))
+
+    return spec
+
+
 def _validate(spec):
     # first throw it to the jsonschema
     validate(spec, 'qmk.keycodes.v1')
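
A sketch of how the two new helpers interact, assuming a hypothetical keycodes directory; the fragment names and versions are invented for illustration:

    # Directory contains:
    #   keycodes_0.0.1.hjson        keycodes_0.0.1_basic.hjson
    #   keycodes_0.0.2.hjson        keycodes_0.0.2_basic.hjson   keycodes_0.0.2_media.hjson

    files = _locate_files(path, 'keycodes', ['0.0.1', '0.0.2'])
    # files == {
    #     '_':     [.../keycodes_0.0.1.hjson, .../keycodes_0.0.2.hjson],
    #     'basic': [.../keycodes_0.0.1_basic.hjson, .../keycodes_0.0.2_basic.hjson],
    #     'media': [.../keycodes_0.0.2_media.hjson],
    # }

    spec = _process_files(files)

Each category is merged on its own with merge_ordered_dicts(), so a newer fragment of the same type can append to or "!reset!" an older one, and the per-category results are then folded together with deep_update(), which never drops keys across categories.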
@@ -62,9 +83,10 @@ def load_spec(version, lang=None):
 
     path = _search_path(lang)
     prefix = _search_prefix(lang)
+    versions = _potential_search_versions(version, lang)
 
-    # Load base
-    spec = _load_fragments(path, prefix, version)
+    # Load bases + any fragments
+    spec = _process_files(_locate_files(path, prefix, versions))
 
     # Sort?
     spec['keycodes'] = dict(sorted(spec.get('keycodes', {}).items()))
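
Taken together, loading a spec now layers every earlier base and fragment underneath the requested version instead of reading a single versioned file. A usage sketch, assuming this module is importable as qmk.keycodes and that 0.0.2 is a published spec version:

    from qmk.keycodes import load_spec

    # builds the spec from the 0.0.1 base and fragments upwards through 0.0.2
    spec = load_spec('0.0.2')
    print(sorted(spec['keycodes'])[:5])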