# -*- coding: utf-8 -*-
# Copyright (c) 2020-2023 Salvador E. Tropea
# Copyright (c) 2020-2023 Instituto Nacional de Tecnología Industrial
# Copyright (c) 2018 John Beard
# License: GPL-3.0
# Project: KiBot (formerly KiPlot)
# Adapted from: https://github.com/johnbeard/kiplot
"""
Class to read KiBot config files
"""
from copy import deepcopy
import collections
from collections import OrderedDict
import difflib
import io
import json
import os
import re
from sys import maxsize
import sys
import textwrap

from .error import KiPlotConfigurationError, config_error
from .misc import (NO_YAML_MODULE, EXIT_BAD_ARGS, EXAMPLE_CFG, WONT_OVERWRITE, W_NOOUTPUTS, W_UNKOUT, W_NOFILTERS,
                   W_NOVARIANTS, W_NOGLOBALS, TRY_INSTALL_CHECK, W_NOPREFLIGHTS, W_NOGROUPS, W_NEWGROUP,
                   error_level_to_name, DEFAULT_ROTATIONS, DEFAULT_OFFSETS)
from .gs import GS
from .registrable import RegOutput, RegVariant, RegFilter, RegDependency
from .pre_base import BasePreFlight
from . import __pypi_deps__
# Logger
from . import log

logger = log.get_logger()

LOCAL_OPTIONAL = 1
GLOBAL_OPTIONAL = LOCAL_OPTIONAL*100
LOCAL_MANDATORY = GLOBAL_OPTIONAL*100
GLOBAL_MANDATORY = LOCAL_MANDATORY*100
GITHUB_RAW = 'https://raw.githubusercontent.com/INTI-CMNB/KiBot/master/docs/images/'
DEB_LOGO_URL = GITHUB_RAW+'debian-openlogo-22x22.png'
DEB_LOGO = ''
PYPI_LOGO_URL = GITHUB_RAW+'PyPI_logo_simplified-22x22.png'
PYPI_LOGO = ''
PY_LOGO_URL = GITHUB_RAW+'Python-logo-notext-22x22.png'
PY_LOGO = ''
TOOL_LOGO_URL = GITHUB_RAW+'llave-inglesa-22x22.png'
TOOL_LOGO = ''
AUTO_DOWN_URL = GITHUB_RAW+'auto_download-22x22.png'
AUTO_DOWN = ''
VALID_SECTIONS = {'kiplot', 'kibot', 'import', 'global', 'filters', 'variants', 'preflight', 'outputs', 'groups'}
VALID_KIBOT_SEC = {'version', 'imported_global_has_less_priority'}
RST_WARNING = ".. Automatically generated by KiBot, please don't edit this file\n"
rst_mode = False


try:
    import yaml
except ImportError:
    log.init()
    GS.exit_with_error(['No yaml module for Python, install python3-yaml', TRY_INSTALL_CHECK], NO_YAML_MODULE)


def update_dict(d, u):
    for k, v in u.items():
        if isinstance(v, collections.abc.Mapping):
            d[k] = update_dict(d.get(k, {}), v)
        elif isinstance(v, list) and k in d:
            d[k] = v+d[k]
        else:
            d[k] = v
    return d
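
# A worked example of the merge semantics above (values are illustrative):
# nested dicts are merged recursively, lists from `u` are prepended and
# scalars are overwritten:
#   update_dict({'a': {'x': 1}, 'l': [1], 's': 1},
#               {'a': {'y': 2}, 'l': [2], 's': 3})
#   -> {'a': {'x': 1, 'y': 2}, 'l': [2, 1], 's': 3}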


def do_replace(k, v, content, replaced):
    key = '@'+k+'@'
    if key in content:
        logger.debugl(2, '- Replacing {} -> {}'.format(key, v))
        content = content.replace(key, str(v))
        replaced = True
    return content, replaced
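
# A minimal sketch of the substitution (hypothetical key and value):
#   do_replace('REV', 2, "rev: '@REV@'", False) -> ("rev: '2'", True)
# `replaced` is only set, never cleared, so callers can loop until a whole
# pass performs no substitution.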


class CollectedImports(object):
    def __init__(self):
        super().__init__()
        self.outputs = []
        self.filters = {}
        self.variants = {}
        self.globals = {}
        self.preflights = []
        self.groups = {}
        self.imported_global_has_less_priority = False


class CfgYamlReader(object):
    def __init__(self):
        super().__init__()
        self.imported_globals = {}
        self.no_run_by_default = []
        self.imported_global_has_less_priority = False

    def _check_version(self, v):
        if not isinstance(v, dict):
            raise KiPlotConfigurationError("Incorrect `kibot` section")
        if 'version' not in v:
            raise KiPlotConfigurationError("YAML config needs `kibot.version`.")
        version = v['version']
        # Only version 1 is known
        if version != 1:
            raise KiPlotConfigurationError("Unknown KiBot config version: "+str(version))
        return version
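
    # The header validated above is mandatory in every config, e.g.:
    #   kibot:
    #     version: 1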

    def _check_globals_priority(self, v):
        ops = 'imported_global_has_less_priority'
        if ops in v:
            value = v[ops]
            if not isinstance(value, bool):
                raise KiPlotConfigurationError(ops+" must be boolean")
            self.imported_global_has_less_priority = value

    def _parse_output(self, o_tree):
        try:
            name = str(o_tree['name'])
            if not name:
                raise KeyError
        except KeyError:
            raise KiPlotConfigurationError("Output needs a name in: "+str(o_tree))

        try:
            otype = o_tree['type']
            if not otype:
                raise KeyError
        except KeyError:
            raise KiPlotConfigurationError("Output `"+name+"` needs a type")

        try:
            comment = o_tree['comment']
        except KeyError:
            comment = ''
        if comment is None:
            comment = ''

        name_type = "`"+name+"` ("+otype+")"

        # Is it a valid type?
        if not RegOutput.is_registered(otype):
            raise KiPlotConfigurationError("Unknown output type: `{}`".format(otype))
        # Load it
        logger.debug("Pre-parsing output options for "+name_type)
        o_out = RegOutput.get_class_for(otype)()
        o_out.set_tree(o_tree)
        # Set the data we already know, so we can skip the configurations that aren't requested
        o_out.name = name
        o_out.type = otype
        o_out.comment = comment
        o_out.extends = o_tree.get('extends', '')
        # Pre-parse the run_by_default option
        o_out.run_by_default = o_tree.get('run_by_default', True)
        if not isinstance(o_out.run_by_default, bool):
            o_out.run_by_default = True
        # Pre-parse the disable_run_by_default option
        o_out.disable_run_by_default = o_tree.get('disable_run_by_default', '')
        if isinstance(o_out.disable_run_by_default, str):
            if o_out.disable_run_by_default:
                self.no_run_by_default.append(o_out.disable_run_by_default)
        elif isinstance(o_out.disable_run_by_default, bool):
            # True means to disable the one we extend
            if o_out.disable_run_by_default and o_out.extends:
                self.no_run_by_default.append(o_out.extends)
        else:
            o_out.disable_run_by_default = ''
        # Pre-parse the groups
        o_out._groups = o_tree.get('groups', [])
        if isinstance(o_out._groups, str):
            o_out._groups = [o_out._groups]
        return o_out
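
    # A sketch of the `outputs` entry shape pre-parsed above (the names and
    # the `gerber` type are illustrative only):
    #   outputs:
    #     - name: 'gerbers'
    #       comment: 'Fabrication files'
    #       type: 'gerber'
    #       run_by_default: true
    #       groups: ['fab']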

    def _parse_outputs(self, v):
        outputs = []
        if isinstance(v, list):
            for o in v:
                outputs.append(self._parse_output(o))
        else:
            raise KiPlotConfigurationError("`outputs` must be a list")
        return outputs

    def _parse_group(self, tree, groups):
        try:
            name = str(tree['name'])
            if not name:
                raise KeyError
        except KeyError:
            raise KiPlotConfigurationError("Group needs a name in: "+str(tree))
        outs = tree.get('outputs', [])
        if not isinstance(outs, list):
            raise KiPlotConfigurationError("'outputs' in group `"+name+"` must be a list (not {})".format(type(outs)))
        for v in outs:
            if not isinstance(v, str):
                raise KiPlotConfigurationError("In group `"+name+"`: all outputs must be strings (not {})".format(type(v)))
        if name in groups:
            raise KiPlotConfigurationError("Duplicated group `{}`".format(name))
        groups[name] = outs

    def _parse_groups(self, v):
        groups = {}
        if isinstance(v, list):
            for o in v:
                self._parse_group(o, groups)
        else:
            raise KiPlotConfigurationError("`groups` must be a list")
        return groups

    def _parse_variant_or_filter(self, o_tree, kind, reg_class, is_internal=False):
        kind_f = kind[0].upper()+kind[1:]
        try:
            name = str(o_tree['name'])
            if not name:
                raise KeyError
        except KeyError:
            raise KiPlotConfigurationError(kind_f+" needs a name in: "+str(o_tree))
        try:
            otype = o_tree['type']
            if not otype:
                raise KeyError
        except KeyError:
            raise KiPlotConfigurationError(kind_f+" `"+name+"` needs a type")
        # Is it a valid type?
        if not reg_class.is_registered(otype):
            raise KiPlotConfigurationError("Unknown {} type: `{}`".format(kind, otype))
        # Load it
        name_type = "`"+name+"` ("+otype+")"
        logger.debug("Parsing "+kind+" "+name_type)
        o_var = reg_class.get_class_for(otype)()
        o_var._internal = is_internal
        o_var.set_tree(o_tree)
        o_var.name = name
        o_var._name_type = name_type
        # Don't configure it yet, wait until we finish loading (could be an import)
        return o_var

    def _parse_variants(self, v, is_internal=False):
        variants = {}
        if isinstance(v, list):
            for o in v:
                o_var = self._parse_variant_or_filter(o, 'variant', RegVariant, is_internal)
                variants[o_var.name] = o_var
        else:
            raise KiPlotConfigurationError("`variants` must be a list")
        return variants

    def _parse_filters(self, v, is_internal=False):
        filters = {}
        if isinstance(v, list):
            for o in v:
                o_fil = self._parse_variant_or_filter(o, 'filter', RegFilter, is_internal)
                self.configure_variant_or_filter(o_fil)
                filters[o_fil.name] = o_fil
        else:
            raise KiPlotConfigurationError("`filters` must be a list")
        return filters

    def _parse_preflights(self, pf):
        logger.debug("Parsing preflight options: {}".format(pf))
        if not isinstance(pf, dict):
            raise KiPlotConfigurationError("Incorrect `preflight` section")

        preflights = []
        for k, v in pf.items():
            if not BasePreFlight.is_registered(k):
                raise KiPlotConfigurationError("Unknown preflight: `{}`".format(k))
            try:
                logger.debug("Parsing preflight "+k)
                o_pre = BasePreFlight.get_class_for(k)(k, v)
            except KiPlotConfigurationError as e:
                raise KiPlotConfigurationError("In preflight '"+k+"': "+str(e))
            preflights.append(o_pre)
        return preflights

    def _parse_global(self, gb):
        """ Get global options """
        logger.debug("Parsing global options: {}".format(gb))
        if not isinstance(gb, dict):
            raise KiPlotConfigurationError("Incorrect `global` section (must be a dict)")
        if self.imported_globals:
            if self.imported_global_has_less_priority:
                update_dict(self.imported_globals, gb)
                gb = self.imported_globals
            else:
                update_dict(gb, self.imported_globals)
            logger.debug("Global options + imported: {}".format(gb))
        # Parse all keys inside it
        glb = GS.class_for_global_opts()
        glb.set_tree(gb)
        try:
            glb.config(None)
        except KiPlotConfigurationError as e:
            raise KiPlotConfigurationError("In `global` section: "+str(e))

    @staticmethod
    def _config_error_import(fname, error):
        if fname is None:
            fname = '*unnamed*'
        raise KiPlotConfigurationError('{} in {} import'.format(error, fname))

    @staticmethod
    def _parse_import_items(kind, fname, value):
        if isinstance(value, str):
            if value == 'all':
                return None
            elif value == 'none':
                return []
            return [value]
        if isinstance(value, list):
            values = []
            for v in value:
                if isinstance(v, str):
                    values.append(v)
                else:
                    CfgYamlReader._config_error_import(fname, '`{}` items must be strings ({})'.format(kind, str(v)))
            return values
        CfgYamlReader._config_error_import(fname, '`{}` must be a string or a list ({})'.format(kind, str(value)))
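
    # Convention used by the import parsers below: None means "import all",
    # an empty list means "import none" and a non-empty list selects by name.
    # Hypothetical calls:
    #   _parse_import_items('outputs', 'x.kibot.yaml', 'all')  -> None
    #   _parse_import_items('outputs', 'x.kibot.yaml', 'none') -> []
    #   _parse_import_items('outputs', 'x.kibot.yaml', 'pdf')  -> ['pdf']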

    def _parse_import_outputs(self, outs, explicit_outs, fn_rel, data, imported):
        sel_outs = []
        if outs is None or len(outs) > 0:
            i_outs = imported.outputs
            if 'outputs' in data:
                i_outs += self._parse_outputs(data['outputs'])
            if outs is not None:
                for o in i_outs:
                    if o.name in outs:
                        sel_outs.append(o)
                        outs.remove(o.name)
                for o in outs:
                    logger.warning(W_UNKOUT+"can't import `{}` output from `{}` (missing)".format(o, fn_rel))
            else:
                sel_outs = i_outs
            if len(sel_outs) == 0:
                if explicit_outs:
                    logger.warning(W_NOOUTPUTS+"No outputs found in `{}`".format(fn_rel))
            else:
                logger.debug('Outputs loaded from `{}`: {}'.format(fn_rel, [c.name for c in sel_outs]))
        if outs is None and explicit_outs and 'outputs' not in data:
            logger.warning(W_NOOUTPUTS+"No outputs found in `{}`".format(fn_rel))
        return sel_outs

    def _parse_import_preflights(self, pre, explicit_pres, fn_rel, data, imported):
        sel_pres = []
        if pre is None or len(pre) > 0:
            i_pres = imported.preflights
            if 'preflight' in data:
                i_pres += self._parse_preflights(data['preflight'])
            if pre is not None:
                for p in i_pres:
                    if p._name in pre:
                        sel_pres.append(p)
                        pre.remove(p._name)
                for p in pre:
                    logger.warning(W_UNKOUT+"can't import `{}` preflight from `{}` (missing)".format(p, fn_rel))
            else:
                sel_pres = i_pres
            if len(sel_pres) == 0:
                if explicit_pres:
                    logger.warning(W_NOPREFLIGHTS+"No preflights found in `{}`".format(fn_rel))
            else:
                logger.debug('Preflights loaded from `{}`: {}'.format(fn_rel, [c._name for c in sel_pres]))
        if pre is None and explicit_pres and 'preflight' not in data:
            logger.warning(W_NOPREFLIGHTS+"No preflights found in `{}`".format(fn_rel))
        return sel_pres

    def _parse_import_filters(self, filters, explicit_fils, fn_rel, data, imported, is_internal):
        sel_fils = {}
        if filters is None or len(filters) > 0:
            if 'filters' in data:
                imported.filters.update(self._parse_filters(data['filters'], is_internal))
            i_fils = imported.filters
            if filters is not None:
                for f in filters:
                    if f in i_fils:
                        sel_fils[f] = i_fils[f]
                    else:
                        logger.warning(W_UNKOUT+"can't import `{}` filter from `{}` (missing)".format(f, fn_rel))
            else:
                sel_fils = i_fils
            if len(sel_fils) == 0:
                if explicit_fils:
                    logger.warning(W_NOFILTERS+"No filters found in `{}`".format(fn_rel))
            else:
                logger.debug('Filters loaded from `{}`: {}'.format(fn_rel, sel_fils.keys()))
        if filters is None and explicit_fils and 'filters' not in data:
            logger.warning(W_NOFILTERS+"No filters found in `{}`".format(fn_rel))
        return sel_fils

    def _parse_import_groups(self, groups, explicit_grps, fn_rel, data, imported):
        sel_grps = {}
        if groups is None or len(groups) > 0:
            if 'groups' in data:
                imported.groups.update(self._parse_groups(data['groups']))
            i_grps = imported.groups
            if groups is not None:
                for f in groups:
                    if f in i_grps:
                        sel_grps[f] = i_grps[f]
                    else:
                        logger.warning(W_UNKOUT+"can't import `{}` group from `{}` (missing)".format(f, fn_rel))
            else:
                sel_grps = i_grps
            if len(sel_grps) == 0:
                if explicit_grps:
                    logger.warning(W_NOGROUPS+"No groups found in `{}`".format(fn_rel))
            else:
                logger.debug('Groups loaded from `{}`: {}'.format(fn_rel, sel_grps.keys()))
        if groups is None and explicit_grps and 'groups' not in data:
            logger.warning(W_NOGROUPS+"No groups found in `{}`".format(fn_rel))
        return sel_grps

    def _parse_import_variants(self, vars, explicit_vars, fn_rel, data, imported, is_internal):
        sel_vars = {}
        if vars is None or len(vars) > 0:
            if 'variants' in data:
                imported.variants.update(self._parse_variants(data['variants'], is_internal))
            i_vars = imported.variants
            if vars is not None:
                for f in vars:
                    if f in i_vars:
                        sel_vars[f] = i_vars[f]
                    else:
                        logger.warning(W_UNKOUT+"can't import `{}` variant from `{}` (missing)".format(f, fn_rel))
            else:
                sel_vars = i_vars
            if len(sel_vars) == 0:
                if explicit_vars:
                    logger.warning(W_NOVARIANTS+"No variants found in `{}`".format(fn_rel))
            else:
                logger.debug('Variants loaded from `{}`: {}'.format(fn_rel, sel_vars.keys()))
        if vars is None and explicit_vars and 'variants' not in data:
            logger.warning(W_NOVARIANTS+"No variants found in `{}`".format(fn_rel))
        return sel_vars

    def _parse_import_globals(self, globals, explicit_globals, fn_rel, data, imported):
        sel_globals = {}
        if globals is None or len(globals) > 0:
            i_globals = data.get('global', {})
            if not isinstance(i_globals, dict):
                raise KiPlotConfigurationError("Incorrect `global` section (must be a dict), while importing from {}".
                                               format(fn_rel))
            update_dict(imported.globals, i_globals)
            i_globals = imported.globals
            if globals is not None:
                for f in globals:
                    if f in i_globals:
                        sel_globals[f] = i_globals[f]
                    else:
                        logger.warning(W_UNKOUT+"can't import `{}` global from `{}` (missing)".format(f, fn_rel))
            else:
                sel_globals = i_globals
            if len(sel_globals) == 0:
                if explicit_globals:
                    logger.warning(W_NOGLOBALS+"No globals found in `{}`".format(fn_rel))
            else:
                logger.debug('Globals loaded from `{}`: {}'.format(fn_rel, sel_globals.keys()))
        if globals is None and explicit_globals and 'global' not in data:
            logger.warning(W_NOGLOBALS+"No globals found in `{}`".format(fn_rel))
        return sel_globals

    def configure_variant_or_filter(self, o_var):
        try:
            o_var.config(None)
        except KiPlotConfigurationError as e:
            msg = "In filter/variant '"+o_var.name+"' ("+o_var.type+"): "+str(e)
            config_error(msg)

    def configure_variants(self, variants):
        logger.debug('Configuring variants')
        for o_var in variants.values():
            self.configure_variant_or_filter(o_var)

    def check_import_file_name(self, dir_name, fn, is_external):
        fn = os.path.expandvars(os.path.expanduser(fn))
        is_internal = False
        if not is_external and not os.path.splitext(fn)[1] and not os.path.isabs(fn):
            name = fn
            fn = os.path.join(GS.get_resource_path('config_templates'), fn+'.kibot.yaml')
            if not os.path.isfile(fn):
                fn_abs = os.path.join(dir_name, name)
                if not os.path.isfile(fn_abs):
                    raise KiPlotConfigurationError("Unknown internal import file `{}` ({})".format(name, fn))
                # Bizarre case: it looks like an internal import, but it's a local file
                fn = fn_abs
            else:
                is_internal = True
        else:
            if not os.path.isabs(fn):
                fn = os.path.join(dir_name, fn)
            if not os.path.isfile(fn):
                raise KiPlotConfigurationError("Missing import file `{}`".format(fn))
        return fn, is_internal
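
    # Resolution sketch: `~` and environment variables are expanded first.
    # A relative name without extension (e.g. `import: [MyTemplate]`, an
    # illustrative name) is first tried as a bundled template
    # `<resources>/config_templates/MyTemplate.kibot.yaml` and only then
    # relative to the directory of the importing config. Anything else is
    # simply resolved relative to that directory.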

    def _parse_import(self, imp, name, collected_definitions, apply=True, depth=0):
        """ Get imports """
        logger.debug("Parsing imports: {}".format(imp))
        depth += 1
        if depth > 20:
            raise KiPlotConfigurationError("Import depth greater than 20, make sure this isn't an infinite loop")
        if not isinstance(imp, list):
            raise KiPlotConfigurationError("Incorrect `import` section (must be a list)")
        # Import the files
        dir_name = os.path.dirname(os.path.abspath(name))
        all_collected = CollectedImports()
        for entry in imp:
            explicit_fils = explicit_vars = explicit_globals = explicit_pres = explicit_groups = False
            local_defs = {}
            if isinstance(entry, str):
                is_external = True
                fn = entry
                outs = None
                filters = []
                vars = []
                globals = []
                pre = []
                groups = []
                explicit_outs = True
            elif isinstance(entry, dict):
                fn = outs = filters = vars = globals = pre = groups = None
                explicit_outs = is_external = False
                for k, v in entry.items():
                    if k == 'file':
                        if not isinstance(v, str):
                            raise KiPlotConfigurationError("`import.file` must be a string ({})".format(str(v)))
                        fn = v
                    elif k == 'is_external':
                        if not isinstance(v, bool):
                            raise KiPlotConfigurationError("`import.is_external` must be true/false ({})".format(str(v)))
                        is_external = v
                    elif k == 'outputs':
                        outs = self._parse_import_items(k, fn, v)
                        explicit_outs = True
                    elif k == 'preflights':
                        pre = self._parse_import_items(k, fn, v)
                        explicit_pres = True
                    elif k == 'filters':
                        filters = self._parse_import_items(k, fn, v)
                        explicit_fils = True
                    elif k == 'variants':
                        vars = self._parse_import_items(k, fn, v)
                        explicit_vars = True
                    elif k in ['global', 'globals']:
                        globals = self._parse_import_items(k, fn, v)
                        explicit_globals = True
                    elif k == 'groups':
                        groups = self._parse_import_items(k, fn, v)
                        explicit_groups = True
                    elif k == 'definitions':
                        if not isinstance(v, dict):
                            CfgYamlReader._config_error_import(fn, 'definitions must be a dict')
                        local_defs = v
                    else:
                        self._config_error_import(fn, "Unknown import entry `{}`".format(str(k)))
                if fn is None:
                    raise KiPlotConfigurationError("`import` entry without `file` ({})".format(str(entry)))
            else:
                raise KiPlotConfigurationError("`import` items must be strings or dicts ({})".format(str(entry)))
            fn, is_internal = self.check_import_file_name(dir_name, fn, is_external)
            fn_rel = os.path.relpath(fn)
            # Create a new dict for the definitions, applying the new ones, and make it the last one
            cur_definitions = deepcopy(collected_definitions[-1])
            cur_definitions.update(local_defs)
            collected_definitions.append(cur_definitions)
            # Now load the YAML
            with open(fn) as fi:
                data = self.load_yaml(fi, collected_definitions, file_name=fn)
            if 'import' in data:
                # Do a recursive import
                imported = self._parse_import(data['import'], fn, collected_definitions, apply=False, depth=depth)
            else:
                # Nothing to import, start fresh
                imported = CollectedImports()
            collected_definitions.pop()
            # Parse and filter all the stuff, add it to all_collected
            # Outputs
            all_collected.outputs.extend(self._parse_import_outputs(outs, explicit_outs, fn_rel, data, imported))
            # Preflights
            all_collected.preflights.extend(self._parse_import_preflights(pre, explicit_pres, fn_rel, data, imported))
            # Filters
            all_collected.filters.update(self._parse_import_filters(filters, explicit_fils, fn_rel, data, imported,
                                                                    is_internal))
            # Variants
            all_collected.variants.update(self._parse_import_variants(vars, explicit_vars, fn_rel, data, imported,
                                                                      is_internal))
            # Globals
            update_dict(all_collected.globals, self._parse_import_globals(globals, explicit_globals, fn_rel, data,
                                                                          imported))
            # Groups
            all_collected.groups.update(self._parse_import_groups(groups, explicit_groups, fn_rel, data, imported))
        if apply:
            # This is the main import (not a recursive one), apply the results
            RegOutput.add_filters(all_collected.filters)
            self.configure_variants(all_collected.variants)
            RegOutput.add_variants(all_collected.variants)
            self.imported_globals = all_collected.globals
            BasePreFlight.add_preflights(all_collected.preflights)
            RegOutput.add_outputs(all_collected.outputs, fn_rel)
            RegOutput.add_groups(all_collected.groups, fn_rel)
        return all_collected

    def load_yaml(self, fstream, collected_definitions, file_name=None):
        # We support some sort of defaults for the -E definitions.
        # To implement it we use a separated "document" inside the same file.
        # Load the file to memory so we can preprocess it
        content = fstream.read()
        docs = re.split(r"^\.\.\.$", content, flags=re.M)
        local_defs = None
        if len(docs) > 1:
            definitions = None
            for doc in docs:
                if re.search(r"^kibot:\s*$", doc, flags=re.M):
                    content = doc
                elif re.search(r"^definitions:\s*$", doc, flags=re.M):
                    definitions = doc
            if definitions:
                logger.debug("Found local definitions")
                try:
                    data = yaml.safe_load(io.StringIO(definitions))
                except yaml.YAMLError as e:
                    raise KiPlotConfigurationError("Error loading YAML ("+str(file_name)+") "+str(e))
                local_defs = data.get('definitions')
                if not local_defs:
                    raise KiPlotConfigurationError("Error loading default definitions from config")
                if not isinstance(local_defs, dict):
                    raise KiPlotConfigurationError("Error: default definitions must be a dict")
                logger.debug("- Local definitions: "+str(local_defs))
                logger.debug("- Current definitions: "+str(collected_definitions[-1]))
                local_defs.update(collected_definitions[-1])
                collected_definitions[-1] = local_defs
                logger.debug("- Updated definitions: "+str(collected_definitions[-1]))
        # Apply the definitions
        if GS.cli_defines or collected_definitions[-1]:
            logger.debug('Applying preprocessor definitions')
            replaced = True
            depth = 0
            while replaced and depth < 20:
                replaced = False
                depth += 1
                # Replace all of them
                logger.debug("- Applying CLI definitions: "+str(GS.cli_defines))
                for k, v in GS.cli_defines.items():
                    content, replaced = do_replace(k, v, content, replaced)
                logger.debug("- Applying collected definitions: "+str(collected_definitions[-1]))
                for k, v in collected_definitions[-1].items():
                    content, replaced = do_replace(k, v, content, replaced)
            if depth >= 20:
                logger.non_critical_error('Maximum depth of definition replacements reached, loop?')
            if GS.debug_level > 3:
                logger.debug('YAML after expanding definitions:\n'+content)
        # Create a stream from the string
        fstream = io.StringIO(content)
        try:
            data = yaml.safe_load(fstream)
        except yaml.YAMLError as e:
            raise KiPlotConfigurationError("Error loading YAML "+str(e))
        # Accept `globals` for `global`
        if 'globals' in data and 'global' not in data:
            data['global'] = data['globals']
            del data['globals']
        return data
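
    # A sketch of the "separated document" mechanism implemented above: the
    # main config and the default definitions share one file, split by a
    # `...` line. `@ID@` and its default are illustrative:
    #
    #   kibot:
    #     version: 1
    #   outputs:
    #     - name: 'gerbers_@ID@'
    #       type: 'gerber'
    #   ...
    #   definitions:
    #     ID: 'prod'
    #
    # Values collected from -E and from importing files take precedence over
    # these in-file defaults.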

    def _check_invalid_in_kibot(self, main_sec):
        defined_in_kibot = set(main_sec.keys())
        invalid_in_kibot = defined_in_kibot-VALID_KIBOT_SEC
        for k in invalid_in_kibot:
            raise KiPlotConfigurationError('Unknown option `{}` in kibot/kiplot.'.format(k))

    def read(self, fstream):
        """
        Read a file object into a config object

        :param fstream: file stream of a config YAML file
        """
        collected_definitions = [{}]
        data = self.load_yaml(fstream, collected_definitions)
        # Analyze the version
        # Currently just checks for v1
        v1 = data.get('kiplot', None)
        v2 = data.get('kibot', None)
        if v1 and v2:
            raise KiPlotConfigurationError("Use `kibot` or `kiplot` but not both.")
        if not v1 and not v2:
            raise KiPlotConfigurationError("YAML config needs `kibot.version`.")
        main_sec = v1 or v2
        self._check_version(main_sec)
        self._check_globals_priority(main_sec)
        self._check_invalid_in_kibot(main_sec)
        # Look for imports
        v1 = data.get('import', None)
        if v1:
            self._parse_import(v1, fstream.name, collected_definitions)
        # Look for globals
        # If no globals are defined, initialize them with default values
        self._parse_global(data.get('global', {}))
        # Look for filters
        v1 = data.get('filters', None)
        if v1:
            RegOutput.add_filters(self._parse_filters(v1))
        # Look for variants
        v1 = data.get('variants', None)
        if v1:
            variants = self._parse_variants(v1)
            self.configure_variants(variants)
            RegOutput.add_variants(variants)
        # Solve the global variant
        if GS.global_variant:
            try:
                GS.solved_global_variant = RegOutput.check_variant(GS.global_variant)
            except KiPlotConfigurationError as e:
                raise KiPlotConfigurationError("In global section: "+str(e))
        # Look for preflights
        v1 = data.get('preflight', None)
        if v1:
            BasePreFlight.add_preflights(self._parse_preflights(v1))
        # Look for outputs
        v1 = data.get('outputs', None)
        if v1:
            RegOutput.add_outputs(self._parse_outputs(v1))
        # Look for groups
        v1 = data.get('groups', None)
        if v1:
            RegOutput.add_groups(self._parse_groups(v1))
        # Report invalid sections (the first one we find)
        defined_sections = set(data.keys())
        invalid_sections = defined_sections-VALID_SECTIONS
        for k in invalid_sections:
            raise KiPlotConfigurationError('Unknown section `{}` in config.'.format(k))
        # Now that all the outputs are loaded we can apply the disable_run_by_default option
        for name in self.no_run_by_default:
            o = RegOutput.get_output(name)
            if o:
                o.run_by_default = False
                logger.debug("Disabling the default run for `{}`".format(o))
        # Apply the groups selection from the outputs
        outs = RegOutput.get_outputs()
        for o in outs:
            for g in o._groups:
                if not RegOutput.add_to_group(o.name, g):
                    grps = list(RegOutput.get_group_names())
                    grps.remove(g)
                    best_matches = difflib.get_close_matches(g, grps)
                    logger.warning(W_NEWGROUP+'Added {} to a new group `{}`'.format(o, g))
                    if best_matches:
                        logger.warning(W_NEWGROUP+"Did you mean {}?".format(' or '.join(best_matches)))
                    else:
                        logger.warning(W_NEWGROUP+"Suggestion: declare it first so we know it isn't a typo")
        return outs
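
# A sketch of the overall layout accepted by CfgYamlReader.read(); every
# section except the `kibot` header is optional and the option values shown
# are illustrative:
#
#   kibot:
#     version: 1
#   import:
#     - file: 'common.kibot.yaml'
#   global:
#     output: '%f-%i.%x'
#   preflight:
#     run_erc: true
#   filters: []
#   variants: []
#   groups: []
#   outputs: []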


def trim(docstring):
    """ PEP 257 recommended trim for __doc__ """
    if docstring is None:
        return []
    # Convert tabs to spaces (following the normal Python rules)
    # and split into a list of lines:
    lines = docstring.expandtabs().splitlines()
    # Determine minimum indentation (first line doesn't count):
    indent = maxsize
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            indent = min(indent, len(line) - len(stripped))
    # Remove indentation (first line is special):
    trimmed = [lines[0].strip()]
    if indent < maxsize:
        for line in lines[1:]:
            trimmed.append(line[indent:].rstrip())
    # Strip off leading blank lines (trailing ones are kept on purpose):
    # while trimmed and not trimmed[-1]:
    #     trimmed.pop()
    while trimmed and not trimmed[0]:
        trimmed.pop(0)
    # Return a list of lines:
    return trimmed
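
# E.g. trim("Title\n      Detailed description.\n") returns
# ['Title', 'Detailed description.']: the common indentation of the body is
# removed, the first line is stripped of surrounding whitespace and leading
# blank lines are dropped.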


def print_output_options(name, cl, indent, context=None, skip_keys=False):
    ind_str = indent*' '
    obj = cl()
    num_opts = 0 if not skip_keys else 1
    ind_size = 3 if rst_mode else 2
    ind_base_sp = '- '
    if not skip_keys:
        ind_base_sp = ' '*ind_size+ind_base_sp
    if rst_mode:
        ind_base_sp += ' '
    for k, v in sorted(obj.get_attrs_gen(), key=lambda x: not obj.is_basic_option(x[0])):
        if k == 'type':
            if indent == ind_size:
                # Type is fixed for an output
                continue
        if not num_opts:
            # We found one, print the title
            if rst_mode:
                print()
            ind = '- ' if rst_mode else '*'
            print(ind_str+ind+' Valid keys:')
            if rst_mode:
                print()
        help, alias, is_alias = obj.get_doc(k)
        dot = True
        if k == 'type' and not indent:
            help = f"*'{name}'"
            dot = False
        is_basic = False
        if help and help[0] == '*':
            help = help[1:]
            is_basic = True
        if is_alias:
            help = 'Alias for '+alias
            entry = '*{}*: '
        elif is_basic:
            entry = '**{}**: ' if rst_mode else '**`{}`**: '
        else:
            entry = '``{}``: ' if rst_mode else '`{}`: '
        entry = ind_base_sp+entry
        if help is None:
            help = 'Undocumented'
            logger.non_critical_error(f'Undocumented option: `{k}`')
        lines = help.split('\n')
        preface = ind_str+entry.format(k)
        if rst_mode and context:
            # Index entry
            preface = preface[:-2] + f' :index:`: <pair: {context}; {k}>` '
        clines = len(lines)
        print('{}{}{}'.format(preface, adapt_text(lines[0].strip()), '.' if clines == 1 and dot else ''))
        if rst_mode:
            if skip_keys:
                ind_help = (indent+ind_size)*' '
            else:
                ind_help = (indent+2*ind_size)*' '
        else:
            ind_help = len(preface)*' '
        in_list = False
        for ln in range(1, clines):
            text = lines[ln].strip()
            # Dots at the beginning are replaced by spaces.
            # Used to keep indentation.
            if text[0] == '.':
                for i in range(1, len(text)):
                    if text[i] != '.':
                        break
                if rst_mode and text[i] == ' ':
                    text = text[i:].strip()
                else:
                    text = ' '*i+text[i:]
            elif text[0] == '-':
                if not in_list:
                    in_list = True
                    print()
                if ln < clines-1 and lines[ln+1].lstrip()[0] == '.':
                    text += ' |br|'
            else:
                if in_list:
                    in_list = False
                    print()
            print('{}{}'.format(ind_help+adapt_text(text), '.' if ln+1 == clines else ''))
        num_opts = num_opts+1
        if isinstance(v, type):
            new_context = context
            if new_context:
                new_context += ' - '+k
            i = indent+ind_size
            if not skip_keys:
                i += ind_size
            print_output_options('', v, i, new_context)
    if rst_mode:
        print()
    # if num_opts == 0:
    #     print(ind_str+'  - No available options')


def print_one_out_help(details, n, o):
    lines = trim(o.__doc__)
    if len(lines) == 0:
        lines = ['Undocumented', 'No description']
    ind_size = 3 if rst_mode else 2
    ind = ' '*ind_size
    extra = ' ' if rst_mode else ''
    if details and GS.out_dir_in_cmd_line and rst_mode:
        # reST using separated files
        dest = os.path.relpath(os.path.join(GS.out_dir, f'{n}.rst'))
        with open(dest, 'wt') as f:
            f.write(RST_WARNING+'\n')
            f.write(f'.. index::\n   pair: {lines[0]}; {n}\n\n')
            print(lines[0], file=f)
            print('~'*len(lines[0]), file=f)
            print(file=f)
            t, r = reformat_text('\n'.join(lines[1:]), 0)
            print(t, file=f)
            f.write(r)
            f.write(f'\n\nType: ``{n}``\n\n')
            # Category
            category = o()._category
            if category:
                if isinstance(category, list):
                    f.write('Categories: '+', '.join((f'**{c}**' for c in category))+'\n')
                else:
                    f.write(f'Category: **{category}**\n')
            f.write('\nParameters:\n\n')
            ori = sys.stdout
            sys.stdout = f
            print_output_options(n, o, 0, 'output - '+n, skip_keys=True)
            sys.stdout = ori
    elif details:
        print('- '+lines[0])
        n2 = f'``{n}``' if rst_mode else f'`{n}`'
        print(f'{ind}- {extra}Type: {n2}')
        print(f'{ind}- {extra}Description: '+adapt_text(lines[1]))
        if rst_mode:
            f, r = reformat_text('\n'.join(lines[1:]), ind_size*2)
            print(r)
        else:
            for ln in range(2, len(lines)):
                print('    '+adapt_text(lines[ln]))
        print_output_options(n, o, ind_size, 'output - '+n)
    else:
        print('* {} [{}]'.format(lines[0], n))


def print_outputs_help(rst, details=False):
    outs = RegOutput.get_registered()
    make_title(rst, 'outputs', len(outs))
    split = details and GS.out_dir_in_cmd_line and rst_mode
    if details:
        print('Notes:')
        if rst_mode:
            print()
        print('1. Most relevant options are listed first and in **bold**. '
              'Which ones are more relevant is quite arbitrary, comments are welcome.')
        print('2. Aliases are listed in *italics*.')
    if split:
        print('\n.. toctree::')
        print('   :maxdepth: 2\n')
    for n, o in OrderedDict(sorted(outs.items())).items():
        if details:
            if split:
                print(f'   outputs/{n}')
            else:
                print()
        print_one_out_help(details, n, o)


def print_output_help(name):
    if not RegOutput.is_registered(name):
        GS.exit_with_error(f'Unknown output type `{name}`, try --help-list-outputs', EXIT_BAD_ARGS)
    print_one_out_help(True, name, RegOutput.get_class_for(name))


def make_title(rst, tp, n, sub='^'):
    global rst_mode
    rst_mode = rst
    logger.debug('{} supported {}'.format(n, tp))
    if rst:
        print(RST_WARNING)
    title = 'Supported '+tp
    print(title)
    if rst_mode:
        print((len(title)*sub)+'\n')
        return 3, ' '
    print()
    return 2, ''


def print_preflights_help(rst):
    prefs = BasePreFlight.get_registered()
    ind_size, extra = make_title(rst, 'preflights', len(prefs))
    for n, o in OrderedDict(sorted(prefs.items())).items():
        help, options = o.get_doc()
        if help is None:
            help = 'Undocumented'
        else:
            help, rest = reformat_text(help, ind_size)
            if rest:
                help += '\n'+rest
        index = f':index:`: <pair: preflights; {n}>` ' if rst else ''
        print(f'- {extra}**{n}**: {index}{help}.')
        if options:
            print_output_options(n, options, ind_size, 'preflight - '+n)


def print_variants_help(rst):
    from .var_base import BaseVariant
    vars = BaseVariant.get_registered()
    ind_size, extra = make_title(rst, 'variants', len(vars))
    for n, o in OrderedDict(sorted(vars.items())).items():
        help = o.__doc__
        if help is None:
            help = 'Undocumented'
            title = ''
        else:
            title, help = reformat_text(help, ind_size)
            title = f'(**{title}**)'
        if rst:
            title = f'{title} :index:`. <pair: variant; {n}>`'
        print(f'- {extra}**{n}**: {title}\n\n{help}.')
        print_output_options(n, o, ind_size, 'variant - '+n)


def adapt_text(text):
    if rst_mode:
        text = text.replace("\\", "\\\\")
        text = text.replace("_'", "\\_'")
        lines = text.splitlines()
        if len(lines) > 1:
            t = []
            in_list = False
            in_warning = False
            for ln in lines:
                if ln[0] == '-':
                    if not in_list:
                        in_list = True
                        t.append('')
                else:
                    if in_list:
                        in_list = False
                        t.append('')
                if ln[-1] == '.' and not in_list:
                    ln += ' |br|'
                if 'Warning: ' in ln:
                    indent = ln.index('Warning: ')
                    t.append('')
                    t.append('.. warning::')
                    in_warning = True
                    ln = ln[:indent]+ln[indent+9:]
                t.append(ln)
            if in_warning:
                t.append('.. ')
            text = '\n'.join(t)
        return adapt_to_rst_urls(text)
    text = text.replace('\\*', '*')
    return text


def adapt_to_rst_urls(text):
    return re.sub(r'\[([^\]]*)\]\(([^\)]*)\)', r'`\1 <\2>`__', text)
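
# E.g. adapt_to_rst_urls('See [KiBot](https://github.com/INTI-CMNB/KiBot)')
# returns 'See `KiBot <https://github.com/INTI-CMNB/KiBot>`__', converting
# markdown links into anonymous reST hyperlinks.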


def reformat_text(txt, ind_size):
    # Keep the first line as-is
    lines = txt.splitlines(True)
    first = lines[0]
    if not rst_mode:
        return first.strip(), (''.join(lines[1:])).rstrip()
    # Unindent the rest
    rest = textwrap.dedent(''.join(lines[1:]))
    # Now indent it to the requested size
    return first.strip(), adapt_text(textwrap.indent(rest, ' '*ind_size).rstrip())


def print_filters_help(rst):
    filters = RegFilter.get_registered()
    ind_size, extra = make_title(rst, 'filters', len(filters))
    for n, o in OrderedDict(sorted(filters.items())).items():
        help = o.__doc__
        if help is None:
            help = ''
            title = 'Undocumented'
        else:
            title, help = reformat_text(help, ind_size)
            title = f'(**{title}**)'

        print(f'- {extra}**{n}**: {title}')
        if help:
            print(f'{help}.')
        print_output_options(n, o, ind_size, 'filter - '+n)


def print_global_options_help(rst):
    global rst_mode
    rst_mode = rst
    if rst:
        print(RST_WARNING)
    print('- global')
    print_output_options('Global options', GS.class_for_global_opts, 3 if rst_mode else 2, 'global options')


def quoted(val):
    if "'" in val:
        return '"{}"'.format(val)
    return "'{}'".format(val)
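
# E.g. quoted("plain") returns the value wrapped in single quotes, while a
# value containing a single quote (like "it's") is wrapped in double quotes.
# This is enough quoting for the YAML examples generated below.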


def print_example_options(f, cls, name, indent, po, is_list=False):
    ind_str = indent*' '
    obj = cls()
    first = True
    if po:
        obj.read_vals_from_po(po)
    for k, _ in obj.get_attrs_gen():
        help, alias, is_alias = obj.get_doc(k, no_basic=True)
        if is_alias:
            f.write(ind_str+'# `{}` is an alias for `{}`\n'.format(k, alias))
            continue
        if help:
            help_lines = help.split('\n')
            for hl in help_lines:
                # Dots at the beginning are replaced by spaces.
                # Used to keep indentation.
                hl = hl.strip()
                if hl[0] == '.':
                    for i in range(1, len(hl)):
                        if hl[i] != '.':
                            break
                    hl = ' '*i+hl[i:]
                f.write(ind_str+'# '+adapt_text(hl)+'\n')
        example_attr = '_'+k+'_example'
        if hasattr(obj, example_attr):
            val = getattr(obj, example_attr)
        else:
            val = getattr(obj, k)
        if isinstance(val, str):
            val = quoted(val)
        elif isinstance(val, bool):
            val = str(val).lower()
        if isinstance(val, type):
            if val.__name__ == 'Optionable' and help and '=' in help_lines[0]:
                # Get the text after the =
                txt = help_lines[0].split('=')[1]
                # Get the text before the space, without the ]
                txt = txt.split()[0][:-1]
                f.write(ind_str+'{}: {}\n'.format(k, txt))
            elif val.get_default():
                f.write(ind_str+'{}: {}\n'.format(k, val.get_default()))
            else:
                if is_list and first:
                    k = '- '+k
                f.write(ind_str+'{}:\n'.format(k))
                extra_indent = 2 if not is_list else 4
                print_example_options(f, val, '', indent+extra_indent, None, help and 'list(dict' in help_lines[0])
                if is_list and first:
                    ind_str += '  '
                    first = False
        else:
            if is_list and first:
                k = '- '+k
            if val is None:
                val = 'null'
            f.write(ind_str+'{}: {}\n'.format(k, val))
            if is_list and first:
                ind_str += '  '
                first = False
    return obj


def create_example(pcb_file, out_dir, copy_options, copy_expand):
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    fname = os.path.join(out_dir, EXAMPLE_CFG)
    if os.path.isfile(fname):
        GS.exit_with_error(fname+" already exists, won't overwrite", WONT_OVERWRITE)
    with open(fname, 'w') as f:
        logger.info('Creating {} example configuration'.format(fname))
        f.write("# ATTENTION! THIS ISN'T A FULLY FUNCTIONAL EXAMPLE.\n")
        f.write("# You should take portions of this example and edit the options to make\n")
        f.write("# them suitable for your use.\n")
        f.write("# This file is useful to know all the available options.\n")
        f.write('kibot:\n  version: 1\n')
        # Preflights
        f.write('\npreflight:\n')
        prefs = BasePreFlight.get_registered()
        for n, o in OrderedDict(sorted(prefs.items())).items():
            if o.__doc__:
                lines = trim(o.__doc__.rstrip()+'.')
                for ln in lines:
                    f.write('  # '+ln.rstrip()+'\n')
            example = o.get_example()
            if not example.startswith("\n"):
                example = ' '+example
            f.write('  {}:{}\n'.format(n, example))
        # Outputs
        outs = RegOutput.get_registered()
        f.write('\noutputs:\n')
        # List of layers
        po = None
        layers = 'all'
        if pcb_file:
            # We have a PCB to take as reference
            board = GS.load_board(pcb_file)
            if copy_options or copy_expand:
                # Layers and plot options from the PCB
                layers = 'selected'
                po = board.GetPlotOptions()
        for n, cls in OrderedDict(sorted(outs.items())).items():
            lines = trim(cls.__doc__)
            if len(lines) == 0:
                lines = ['Undocumented', 'No description']
            f.write('  # '+lines[0].rstrip()+':\n')
            for ln in range(2, len(lines)):
                f.write('  # '+lines[ln].rstrip()+'\n')
            f.write("  - name: '{}_example'\n".format(n))
            f.write("    comment: {}\n".format(quoted(lines[1])))
            f.write("    type: '{}'\n".format(n))
            f.write("    dir: 'Example/{}_dir'\n".format(n))
            f.write("    options:\n")
            obj = cls()
            print_example_options(f, obj.options, n, 6, po)
            if 'layers' in obj.__dict__:
                if copy_expand:
                    f.write('    layers:\n')
                    layers = obj.layers.solve(layers)
                    for layer in layers:
                        f.write("      - layer: '{}'\n".format(layer.layer))
                        f.write("        suffix: '{}'\n".format(layer.suffix))
                        if layer.description:
                            f.write("        description: '{}'\n".format(layer.description))
                else:
                    f.write('    layers: {}\n'.format(layers))


def global2human(name):
    return '`'+name+'`' if name != 'global' else 'general use'


class MyEncoder(json.JSONEncoder):
    """ Simple JSON encoder for objects """
    def default(self, o):
        return o.__dict__


def print_dep_comments(dep, extra, ind):
    if len(dep.comments) == 1:
        print(f'- {extra}Note: '+dep.comments[0])
    elif len(dep.comments) > 1:
        print(f'- {extra}Notes:')
        for comment in dep.comments:
            print(f'{ind}- {extra}'+comment)


def compose_version(version, max_version):
    ver = ' v'+'.'.join(map(str, version)) if version else ''
    ver += ' (<'+'.'.join(map(str, max_version))+')' if max_version else ''
    return ver
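
# E.g. compose_version((1, 2, 3), None) -> ' v1.2.3' and
# compose_version((1, 2, 3), (2, 0)) -> ' v1.2.3 (<2.0)'. Both parts are
# optional, so compose_version(None, None) -> ''.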


def print_needed(needed, extra):
    if needed:
        if len(needed) == 1:
            if needed[0] == 'general use':
                print(f'- {extra}Mandatory')
            else:
                print(f'- {extra}Mandatory for '+needed[0])
        else:
            print(f'- {extra}Mandatory for: '+', '.join(sorted(needed)))


def add_rst_image(img, url, rst_images):
    rst_images.append((img, url))
    return f' |image{len(rst_images)}|'


def print_dependencies(markdown=True, jsn=False, rst=False):
    if rst:
        print(RST_WARNING)
    # Compute the importance of each dependency
    for dep in RegDependency.get_registered().values():
        importance = 0
        for r in dep.roles:
            local = r.output != 'global'
            if r.mandatory:
                importance += LOCAL_MANDATORY if local else GLOBAL_MANDATORY
            else:
                importance += LOCAL_OPTIONAL if local else GLOBAL_OPTIONAL
        dep.importance = importance
    # The JSON output is just a dump
    if jsn:
        print(json.dumps(RegDependency.get_registered(), cls=MyEncoder, indent=4, sort_keys=True))
        return
    # Now print them sorted by importance (and by name as a second criteria)
    rst_images = []
    for name, dep in sorted(sorted(RegDependency.get_registered().items(), key=lambda x: x[0].lower()),  # noqa: C414
                            key=lambda x: x[1].importance, reverse=True):
        deb = ''
        if markdown:
            dtype = PY_LOGO if dep.is_python else TOOL_LOGO
            is_pypi_dep = PYPI_LOGO if dep.pypi_name.lower() in __pypi_deps__ else ''
            has_downloader = ' '+AUTO_DOWN if dep.downloader is not None else ''
            if dep.is_python:
                url = 'https://pypi.org/project/{}/'.format(name)
                if is_pypi_dep:
                    is_pypi_dep = ' [{}]({})'.format(is_pypi_dep, url)
            else:
                url = dep.url
            name = '[**{}**]({})'.format(name, url)
            dtype = ' [{}]({})'.format(dtype, url)
            if dep.in_debian:
                deb = ' [{}](https://packages.debian.org/stable/{})'.format(DEB_LOGO, dep.deb_package)
        elif rst:
            if dep.is_python:
                main_url = f'https://pypi.org/project/{name}/'
            else:
                main_url = dep.url
            dtype = add_rst_image(PY_LOGO_URL if dep.is_python else TOOL_LOGO_URL, main_url, rst_images)
            if dep.pypi_name.lower() in __pypi_deps__:
                if dep.is_python:
                    is_pypi_dep = add_rst_image(PYPI_LOGO_URL, f'https://pypi.org/project/{name}/', rst_images)
                else:
                    is_pypi_dep = ' |PyPi dependency|'
            else:
                is_pypi_dep = ''
            if dep.in_debian:
                deb = add_rst_image(DEB_LOGO_URL, f'https://packages.debian.org/stable/{dep.deb_package}', rst_images)
            has_downloader = ' |Auto-download|' if dep.downloader is not None else ''
            name = f"`{name} <{main_url}>`__ :index:`: <pair: dependency; {name}>` "
        else:
            dtype = ' (Python module)' if dep.is_python else ' (Tool)'
            is_pypi_dep = ' (PyPi dependency)' if dep.pypi_name.lower() in __pypi_deps__ else ''
            has_downloader = ' (Auto-download)' if dep.downloader is not None else ''
            if dep.in_debian:
                deb = ' (Debian: {})'.format(dep.deb_package)
        needed = []
        optional = []
        version = None
        max_version = None
        for r in dep.roles:
            if r.mandatory:
                needed.append(global2human(r.output))
            else:
                optional.append(r)
            if r.version and (version is None or r.version > version):
                version = r.version
            if r.max_version and (max_version is None or r.max_version < max_version):
                max_version = r.max_version
        ver = compose_version(version, max_version)
        print(name+ver+dtype+is_pypi_dep+deb+has_downloader)
        if rst:
            print()
            extra = ' '
            ind = '   '
        else:
            extra = ''
            ind = '  '
        print_needed(needed, extra)
        if optional:
            if len(optional) == 1:
                o = optional[0]
                desc = o.desc[0].lower()+o.desc[1:]
                print(f'- {extra}Optional to {desc} for {global2human(o.output)}')
            else:
                print(f'- {extra}Optional to:')
                if rst:
                    print()
                for o in optional:
                    ver = ''
                    if o.version:
                        ver = ' (v'+'.'.join(map(str, o.version))+')'
                    print(f'{ind}- {extra}{o.desc} for {global2human(o.output)}{ver}')
        print_dep_comments(dep, extra, ind)
        print()
    # reST inline images are separated:
    if rst_images:
        print('.. |PyPi dependency| image:: '+PYPI_LOGO_URL)
        print('.. |Auto-download| image:: '+AUTO_DOWN_URL)
        for c, rst_img in enumerate(rst_images):
            print(f'.. |image{c+1}| image:: {rst_img[0]}\n   :target: {rst_img[1]}')
        print()


def print_errors(rst):
    make_title(rst, 'error levels', len(error_level_to_name), '~')
    for c, n in enumerate(error_level_to_name):
        print(f'- {c}: {n}')


def print_list_rotations():
    rots = sorted(DEFAULT_ROTATIONS, key=lambda x: x[0])
    w = len(max(rots, key=lambda x: len(x[0]))[0])+4
    sep = '='*w + ' ========'
    f1 = f'%-{w}s'
    print(sep)
    print((f1+' Rotation') % 'Footprint')
    print(sep)
    fmt = f'{f1} %6d'
    for v in rots:
        print(fmt % ('``'+v[0]+'``', v[1]))
    print(sep)


def print_list_offsets():
    offs = sorted(DEFAULT_OFFSETS, key=lambda x: x[0])
    w = len(max(offs, key=lambda x: len(x[0]))[0])+4
    sep = '='*w + ' ======== ========'
    f1 = f'%-{w}s'
    print(sep)
    print((f1+' Offset X Offset Y') % 'Footprint')
    print(sep)
    fmt = f'{f1} %8.2f %8.2f'
    for v in offs:
        print(fmt % ('``'+v[0]+'``', v[1][0], v[1][1]))
    print(sep)