@@ -35,18 +35,27 @@ def enabled_defines(filepath):
     '''
     outdict = {}
     section = "user"
-    spatt = re.compile(r".*@section +([-a-zA-Z0-9_\s]+)$") # must match @section ...
+    spatt = re.compile(r".*@section +([-a-zA-Z0-9_\s]+)$") # @section ...
 
     f = open(filepath, encoding="utf8").read().split("\n")
 
-    # Get the full contents of the file and remove all block comments.
-    # This will avoid false positives from #defines in comments
-    f = re.sub(r'/\*.*?\*/', '', '\n'.join(f), flags=re.DOTALL).split("\n")
-
+    incomment = False
     for line in f:
         sline = line.strip()
+
         m = re.match(spatt, sline) # @section ...
         if m: section = m.group(1).strip() ; continue
+
+        if incomment:
+            if '*/' in sline:
+                incomment = False
+            continue
+        else:
+            mpos, spos = sline.find('/*'), sline.find('//')
+            if mpos >= 0 and (spos < 0 or spos > mpos):
+                incomment = True
+                continue
+
         if sline[:7] == "#define":
             # Extract the key here (we don't care about the value)
             kv = sline[8:].strip().split()
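The hunk above replaces the regex that stripped /* ... */ block comments from the whole file with an incomment flag tracked line by line, so #define lines inside block comments are skipped without rejoining and re-splitting the file. A minimal standalone sketch of the same idea; the function and sample names here are illustrative, not taken from signature.py:

def active_define_lines(lines):
    '''Yield #define lines that are not inside a /* ... */ block comment.'''
    incomment = False
    for line in lines:
        sline = line.strip()
        if incomment:
            if '*/' in sline:          # block comment ends on this line
                incomment = False
            continue
        mpos, spos = sline.find('/*'), sline.find('//')
        if mpos >= 0 and (spos < 0 or spos > mpos):
            incomment = True           # block comment opens before any // comment
            continue
        if sline.startswith('#define'):
            yield sline

sample = ['#define A 1', '/*', '#define B 2', '*/', '#define C 3']
print(list(active_define_lines(sample)))   # ['#define A 1', '#define C 3']

Like the hunk's version, this simplified flow enters comment mode even when a block comment opens and closes on the same line; the removed regex handled that case, but only by reassembling the entire file first.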
@@ -70,6 +79,11 @@ def compress_file(filepath, storedname, outpath):
     with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
         zipf.write(filepath, arcname=storedname, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
 
+ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
+
+#
+# Compute a build signature and/or export the configuration
+#
 def compute_build_signature(env):
     '''
     Compute the build signature by extracting all configuration settings and
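Hoisting ignore to module scope lets every export path skip the same bookkeeping defines instead of each branch carrying its own copy. A small hedged illustration of the filter; the options dict below is invented for the example:

opts = {'MOTHERBOARD': 'BOARD_RAMPS_14_EFB', 'CONFIGURATION_H_VERSION': '02010300'}
exported = {k: v for k, v in opts.items() if k not in ignore}
print(exported)   # {'MOTHERBOARD': 'BOARD_RAMPS_14_EFB'}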
@@ -81,11 +95,17 @@ def compute_build_signature(env):
     env.Append(BUILD_SIGNATURE=1)
 
     build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
-    marlin_json = build_path / 'marlin_config.json'
+    json_name = 'marlin_config.json'
+    marlin_json = build_path / json_name
     marlin_zip = build_path / 'mc.zip'
 
+    # ANSI colors
+    green = "\u001b[32m"
+    yellow = "\u001b[33m"
+    red = "\u001b[31m"
+
     # Definitions from these files will be kept
-    header_paths = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
+    header_paths = ('Marlin/Configuration.h', 'Marlin/Configuration_adv.h')
 
     # Check if we can skip processing
     hashes = ''
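The green/yellow/red strings are raw ANSI SGR escape codes; printing one switches the terminal's foreground color until it is reset. A short demo of the mechanism, assuming the standard reset code "\u001b[0m" (this hunk itself does not show where, or whether, the script resets the color):

yellow = "\u001b[33m"
reset  = "\u001b[0m"
print(yellow + "Generating config.ini ..." + reset)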
@@ -100,7 +120,7 @@ def compute_build_signature(env):
             conf = json.load(infile)
             same_hash = conf['__INITIAL_HASH'] == hashes
             if same_hash:
-                compress_file(marlin_json, 'marlin_config.json', marlin_zip)
+                compress_file(marlin_json, json_name, marlin_zip)
     except:
         pass
 
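The skip test above reloads the previously exported marlin_config.json and compares its '__INITIAL_HASH' field with the freshly computed hashes string; on a match, only mc.zip is refreshed instead of recomputing the whole signature. The diff does not show how hashes is built, so the sketch below is a hedged stand-in using hashlib directly; the field name and file names mirror the hunk, everything else is illustrative:

import hashlib, json
from pathlib import Path

def short_sha(path):
    return hashlib.sha256(Path(path).read_bytes()).hexdigest()[0:10]

hashes = ''.join(short_sha(h) for h in ('Marlin/Configuration.h', 'Marlin/Configuration_adv.h'))

try:
    conf = json.loads(Path('marlin_config.json').read_text())
    same_hash = conf['__INITIAL_HASH'] == hashes
except Exception:
    same_hash = False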
@@ -179,25 +199,28 @@ def compute_build_signature(env):
     extended_dump = config_dump > 100
     if extended_dump: config_dump -= 100
 
+    # Get the schema class for exports that require it
+    if config_dump in (3, 4) or (extended_dump and config_dump in (2, 5)):
+        try:
+            conf_schema = schema.extract()
+        except Exception as exc:
+            print(red + "Error: " + str(exc))
+            conf_schema = None
+
     #
     # Produce an INI file if CONFIG_EXPORT == 2
     #
     if config_dump == 2:
-        print("Generating config.ini ...")
+        print(yellow + "Generating config.ini ...")
 
         ini_fmt = '{0:40} = {1}'
         ext_fmt = '{0:40} {1}'
-        ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
 
         if extended_dump:
             # Extended export will dump config options by section
 
             # We'll use Schema class to get the sections
-            try:
-                conf_schema = schema.extract()
-            except Exception as exc:
-                print("Error: " + str(exc))
-                exit(1)
+            if not conf_schema: exit(1)
 
             # Then group options by schema @section
             sections = {}
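This hunk centralizes schema extraction: every export mode that needs the schema gets it from the single try/except above, with errors printed in red and conf_schema left as None rather than each branch calling schema.extract() itself. The extended flag is simply the +100 offset handled at the top of the hunk; a tiny sketch of that decoding rule (the helper name is made up):

def decode_config_export(value):
    extended = value > 100      # values above 100 request the extended dump
    if extended: value -= 100
    return value, extended

print(decode_config_export(2))    # (2, False) -> config.ini
print(decode_config_export(103))  # (3, True)  -> schema.json, extended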
@@ -305,27 +328,22 @@ f'''#
             for header in real_config:
                 outfile.write(f'\n[{filegrp[header]}]\n')
                 for name in sorted(real_config[header]):
-                    if name not in ignore:
-                        val = real_config[header][name]['value']
-                        if val == '': val = 'on'
-                        outfile.write(ini_fmt.format(name.lower(), val) + '\n')
+                    if name in ignore: continue
+                    val = real_config[header][name]['value']
+                    if val == '': val = 'on'
+                    outfile.write(ini_fmt.format(name.lower(), val) + '\n')
 
     #
     # CONFIG_EXPORT 3 = schema.json, 4 = schema.yml
     #
-    if config_dump >= 3:
-        try:
-            conf_schema = schema.extract()
-        except Exception as exc:
-            print("Error: " + str(exc))
-            conf_schema = None
+    if config_dump in (3, 4):
 
         if conf_schema:
             #
             # 3 = schema.json
             #
             if config_dump in (3, 13):
-                print("Generating schema.json ...")
+                print(yellow + "Generating schema.json ...")
                 schema.dump_json(conf_schema, build_path / 'schema.json')
                 if config_dump == 13:
                     schema.group_options(conf_schema)
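In the rewritten INI loop, options in ignore are skipped with an early continue, names are lowercased, and a bare #define (empty value) is written as "on", all through the ini_fmt template. A toy rendering of the same formatting; the sample options are invented:

ini_fmt = '{0:40} = {1}'
ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
sample = {'MOTHERBOARD': 'BOARD_RAMPS_14_EFB', 'EEPROM_SETTINGS': '', 'CONFIG_EXPORT': '2'}

for name in sorted(sample):
    if name in ignore: continue          # CONFIG_EXPORT is dropped
    val = sample[name] or 'on'           # empty value becomes 'on'
    print(ini_fmt.format(name.lower(), val))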
@@ -335,7 +353,7 @@ f'''#
             # 4 = schema.yml
             #
             elif config_dump == 4:
-                print("Generating schema.yml ...")
+                print(yellow + "Generating schema.yml ...")
                 try:
                     import yaml
                 except ImportError:
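schema.yml generation depends on PyYAML, which is why the import sits inside try/except ImportError; the lines that handle the missing package are outside this hunk. Assuming PyYAML is installed, writing a plain dict of the extracted schema is a one-liner; the data below is a placeholder:

import yaml   # requires PyYAML (pip install pyyaml)
with open('schema.yml', 'w') as f:
    yaml.safe_dump({'example_section': {'EXAMPLE_OPTION': True}}, f, sort_keys=False)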
@@ -355,7 +373,7 @@ f'''#
 
     json_data = {}
     if extended_dump:
-        print("Extended dump ...")
+        print(yellow + "Extended dump ...")
         for header in real_config:
            confs = real_config[header]
            json_data[header] = {}
@@ -395,7 +413,7 @@ f'''#
 
     # Compress the JSON file as much as we can
     if not same_hash:
-        compress_file(marlin_json, 'marlin_config.json', marlin_zip)
+        compress_file(marlin_json, json_name, marlin_zip)
 
     # Generate a C source file containing the entire ZIP file as an array
     with open('Marlin/src/mczip.h','wb') as result_file: