Browse Source

🧑‍💻 Use spaces indent for Python

Scott Lahteine 2 years ago
parent
commit
306e03b03b

+ 5 - 1
.editorconfig

@@ -14,6 +14,10 @@ end_of_line = lf
 indent_style = space
 indent_size = 2
 
-[{*.py,*.conf,*.sublime-project}]
+[{*.py}]
+indent_style = space
+indent_size = 4
+
+[{*.conf,*.sublime-project}]
 indent_style = tab
 indent_size = 4

+ 112 - 112
Marlin/src/HAL/LPC1768/upload_extra_script.py

@@ -9,127 +9,127 @@ from __future__ import print_function
 import pioutil
 if pioutil.is_pio_build():
 
-	target_filename = "FIRMWARE.CUR"
-	target_drive = "REARM"
+    target_filename = "FIRMWARE.CUR"
+    target_drive = "REARM"
 
-	import platform
+    import platform
 
-	current_OS = platform.system()
-	Import("env")
+    current_OS = platform.system()
+    Import("env")
 
-	def print_error(e):
-		print('\nUnable to find destination disk (%s)\n' \
-			  'Please select it in platformio.ini using the upload_port keyword ' \
-			  '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
-			  'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
-			  %(e, env.get('PIOENV')))
+    def print_error(e):
+        print('\nUnable to find destination disk (%s)\n' \
+              'Please select it in platformio.ini using the upload_port keyword ' \
+              '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
+              'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
+              %(e, env.get('PIOENV')))
 
-	def before_upload(source, target, env):
-		try:
-			from pathlib import Path
-  			#
-			# Find a disk for upload
-			#
-			upload_disk = 'Disk not found'
-			target_file_found = False
-			target_drive_found = False
-			if current_OS == 'Windows':
-				#
-				# platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
-				#   Windows - doesn't care about the disk's name, only cares about the drive letter
-				import subprocess,string
-				from ctypes import windll
-				from pathlib import PureWindowsPath
+    def before_upload(source, target, env):
+        try:
+            from pathlib import Path
+            #
+            # Find a disk for upload
+            #
+            upload_disk = 'Disk not found'
+            target_file_found = False
+            target_drive_found = False
+            if current_OS == 'Windows':
+                #
+                # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
+                #   Windows - doesn't care about the disk's name, only cares about the drive letter
+                import subprocess,string
+                from ctypes import windll
+                from pathlib import PureWindowsPath
 
-				# getting list of drives
-				# https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
-				drives = []
-				bitmask = windll.kernel32.GetLogicalDrives()
-				for letter in string.ascii_uppercase:
-					if bitmask & 1:
-						drives.append(letter)
-					bitmask >>= 1
+                # getting list of drives
+                # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
+                drives = []
+                bitmask = windll.kernel32.GetLogicalDrives()
+                for letter in string.ascii_uppercase:
+                    if bitmask & 1:
+                        drives.append(letter)
+                    bitmask >>= 1
 
-				for drive in drives:
-					final_drive_name = drive + ':'
-					# print ('disc check: {}'.format(final_drive_name))
-					try:
-						volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
-					except Exception as e:
-						print ('error:{}'.format(e))
-						continue
-					else:
-						if target_drive in volume_info and not target_file_found:  # set upload if not found target file yet
-							target_drive_found = True
-							upload_disk = PureWindowsPath(final_drive_name)
-						if target_filename in volume_info:
-							if not target_file_found:
-								upload_disk = PureWindowsPath(final_drive_name)
-							target_file_found = True
+                for drive in drives:
+                    final_drive_name = drive + ':'
+                    # print ('disc check: {}'.format(final_drive_name))
+                    try:
+                        volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
+                    except Exception as e:
+                        print ('error:{}'.format(e))
+                        continue
+                    else:
+                        if target_drive in volume_info and not target_file_found:  # use this drive for upload if the target file hasn't been found yet
+                            target_drive_found = True
+                            upload_disk = PureWindowsPath(final_drive_name)
+                        if target_filename in volume_info:
+                            if not target_file_found:
+                                upload_disk = PureWindowsPath(final_drive_name)
+                            target_file_found = True
 
-			elif current_OS == 'Linux':
-				#
-				# platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
-				#
-				import getpass
-				user = getpass.getuser()
-				mpath = Path('media', user)
-				drives = [ x for x in mpath.iterdir() if x.is_dir() ]
-				if target_drive in drives:  # If target drive is found, use it.
-					target_drive_found = True
-					upload_disk = mpath / target_drive
-				else:
-					for drive in drives:
-						try:
-							fpath = mpath / drive
-							filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
-						except:
-							continue
-						else:
-							if target_filename in filenames:
-								upload_disk = mpath / drive
-								target_file_found = True
-								break
-				#
-				# set upload_port to drive if found
-				#
+            elif current_OS == 'Linux':
+                #
+                # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
+                #
+                import getpass
+                user = getpass.getuser()
+                mpath = Path('media', user)
+                drives = [ x for x in mpath.iterdir() if x.is_dir() ]
+                if target_drive in drives:  # If target drive is found, use it.
+                    target_drive_found = True
+                    upload_disk = mpath / target_drive
+                else:
+                    for drive in drives:
+                        try:
+                            fpath = mpath / drive
+                            filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
+                        except:
+                            continue
+                        else:
+                            if target_filename in filenames:
+                                upload_disk = mpath / drive
+                                target_file_found = True
+                                break
+                #
+                # set upload_port to drive if found
+                #
 
-				if target_file_found or target_drive_found:
-					env.Replace(
-						UPLOAD_FLAGS="-P$UPLOAD_PORT"
-					)
+                if target_file_found or target_drive_found:
+                    env.Replace(
+                        UPLOAD_FLAGS="-P$UPLOAD_PORT"
+                    )
 
-			elif current_OS == 'Darwin':  # MAC
-				#
-				# platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
-				#
-				dpath = Path('/Volumes')  # human readable names
-				drives = [ x for x in dpath.iterdir() if x.is_dir() ]
-				if target_drive in drives and not target_file_found:  # set upload if not found target file yet
-					target_drive_found = True
-					upload_disk = dpath / target_drive
-				for drive in drives:
-					try:
-						fpath = dpath / drive	# will get an error if the drive is protected
-						filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
-					except:
-						continue
-					else:
-						if target_filename in filenames:
-							upload_disk = dpath / drive
-							target_file_found = True
-							break
+            elif current_OS == 'Darwin':  # MAC
+                #
+                # platformio.ini will accept this for an OSX upload port designation: 'upload_port = /Volumes/drive_name'
+                #
+                dpath = Path('/Volumes')  # human readable names
+                drives = [ x for x in dpath.iterdir() if x.is_dir() ]
+                if target_drive in drives and not target_file_found:  # use this drive for upload if the target file hasn't been found yet
+                    target_drive_found = True
+                    upload_disk = dpath / target_drive
+                for drive in drives:
+                    try:
+                        fpath = dpath / drive   # will get an error if the drive is protected
+                        filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
+                    except:
+                        continue
+                    else:
+                        if target_filename in filenames:
+                            upload_disk = dpath / drive
+                            target_file_found = True
+                            break
 
-			#
-			# Set upload_port to drive if found
-			#
-			if target_file_found or target_drive_found:
-				env.Replace(UPLOAD_PORT=str(upload_disk))
-				print('\nUpload disk: ', upload_disk, '\n')
-			else:
-				print_error('Autodetect Error')
+            #
+            # Set upload_port to drive if found
+            #
+            if target_file_found or target_drive_found:
+                env.Replace(UPLOAD_PORT=str(upload_disk))
+                print('\nUpload disk: ', upload_disk, '\n')
+            else:
+                print_error('Autodetect Error')
 
-		except Exception as e:
-			print_error(str(e))
+        except Exception as e:
+            print_error(str(e))
 
-	env.AddPreAction("upload", before_upload)
+    env.AddPreAction("upload", before_upload)

+ 1 - 1
Marlin/src/feature/spindle_laser.h

@@ -285,7 +285,7 @@ public:
           if (!menuPower) menuPower = cpwr_to_upwr(SPEED_POWER_STARTUP);
           power = upower_to_ocr(menuPower);
           apply_power(power);
-        } else 
+        } else
           apply_power(0);
       }
 

+ 10 - 10
buildroot/share/PlatformIO/scripts/SAMD51_grandcentral_m4.py

@@ -4,17 +4,17 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-	from os.path import join, isfile
-	import shutil
+    from os.path import join, isfile
+    import shutil
 
-	Import("env")
+    Import("env")
 
-	mf = env["MARLIN_FEATURES"]
-	rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
-	txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"
+    mf = env["MARLIN_FEATURES"]
+    rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
+    txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"
 
-	serialBuf = str(max(int(rxBuf), int(txBuf), 350))
+    serialBuf = str(max(int(rxBuf), int(txBuf), 350))
 
-	build_flags = env.get('BUILD_FLAGS')
-	build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
-	env.Replace(BUILD_FLAGS=build_flags)
+    build_flags = env.get('BUILD_FLAGS')
+    build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
+    env.Replace(BUILD_FLAGS=build_flags)

+ 87 - 87
buildroot/share/PlatformIO/scripts/chitu_crypt.py

@@ -4,123 +4,123 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-	import struct,uuid,marlin
+    import struct,uuid,marlin
 
-	board = marlin.env.BoardConfig()
+    board = marlin.env.BoardConfig()
 
-	def calculate_crc(contents, seed):
-		accumulating_xor_value = seed;
+    def calculate_crc(contents, seed):
+        accumulating_xor_value = seed;
 
-		for i in range(0, len(contents), 4):
-			value = struct.unpack('<I', contents[ i : i + 4])[0]
-			accumulating_xor_value = accumulating_xor_value ^ value
-		return accumulating_xor_value
+        for i in range(0, len(contents), 4):
+            value = struct.unpack('<I', contents[ i : i + 4])[0]
+            accumulating_xor_value = accumulating_xor_value ^ value
+        return accumulating_xor_value
 
-	def xor_block(r0, r1, block_number, block_size, file_key):
-		# This is the loop counter
-		loop_counter = 0x0
+    def xor_block(r0, r1, block_number, block_size, file_key):
+        # This is the loop counter
+        loop_counter = 0x0
 
-		# This is the key length
-		key_length = 0x18
+        # This is the key length
+        key_length = 0x18
 
-		# This is an initial seed
-		xor_seed = 0x4BAD
+        # This is an initial seed
+        xor_seed = 0x4BAD
 
-		# This is the block counter
-		block_number = xor_seed * block_number
+        # This is the block counter
+        block_number = xor_seed * block_number
 
-		#load the xor key from the file
-		r7 =  file_key
+        #load the xor key from the file
+        r7 =  file_key
 
-		for loop_counter in range(0, block_size):
-			# meant to make sure different bits of the key are used.
-			xor_seed = int(loop_counter / key_length)
+        for loop_counter in range(0, block_size):
+            # meant to make sure different bits of the key are used.
+            xor_seed = int(loop_counter / key_length)
 
-			# IP is a scratch register / R12
-			ip = loop_counter - (key_length * xor_seed)
+            # IP is a scratch register / R12
+            ip = loop_counter - (key_length * xor_seed)
 
-			# xor_seed = (loop_counter * loop_counter) + block_number
-			xor_seed = (loop_counter * loop_counter) + block_number
+            # xor_seed = (loop_counter * loop_counter) + block_number
+            xor_seed = (loop_counter * loop_counter) + block_number
 
-			# shift the xor_seed left by the bits in IP.
-			xor_seed = xor_seed >> ip
+            # shift the xor_seed right by the bits in IP.
+            xor_seed = xor_seed >> ip
 
-			# load a byte into IP
-			ip = r0[loop_counter]
+            # load a byte into IP
+            ip = r0[loop_counter]
 
-			# XOR the seed with r7
-			xor_seed = xor_seed ^ r7
+            # XOR the seed with r7
+            xor_seed = xor_seed ^ r7
 
-			# and then with IP
-			xor_seed = xor_seed ^ ip
+            # and then with IP
+            xor_seed = xor_seed ^ ip
 
-			#Now store the byte back
-			r1[loop_counter] = xor_seed & 0xFF
+            #Now store the byte back
+            r1[loop_counter] = xor_seed & 0xFF
 
-			#increment the loop_counter
-			loop_counter = loop_counter + 1
+            #increment the loop_counter
+            loop_counter = loop_counter + 1
 
-	def encrypt_file(input, output_file, file_length):
-		input_file = bytearray(input.read())
-		block_size = 0x800
-		key_length = 0x18
+    def encrypt_file(input, output_file, file_length):
+        input_file = bytearray(input.read())
+        block_size = 0x800
+        key_length = 0x18
 
-		uid_value = uuid.uuid4()
-		file_key = int(uid_value.hex[0:8], 16)
+        uid_value = uuid.uuid4()
+        file_key = int(uid_value.hex[0:8], 16)
 
-		xor_crc = 0xEF3D4323;
+        xor_crc = 0xEF3D4323;
 
-		# the input file is exepcted to be in chunks of 0x800
-		# so round the size
-		while len(input_file) % block_size != 0:
-			input_file.extend(b'0x0')
+        # the input file is expected to be in chunks of 0x800
+        # so round the size
+        while len(input_file) % block_size != 0:
+            input_file.extend(b'0x0')
 
-		# write the file header
-		output_file.write(struct.pack(">I", 0x443D2D3F))
-		# encrypt the contents using a known file header key
+        # write the file header
+        output_file.write(struct.pack(">I", 0x443D2D3F))
+        # encrypt the contents using a known file header key
 
-		# write the file_key
-		output_file.write(struct.pack("<I", file_key))
+        # write the file_key
+        output_file.write(struct.pack("<I", file_key))
 
-		#TODO - how to enforce that the firmware aligns to block boundaries?
-		block_count = int(len(input_file) / block_size)
-		print ("Block Count is ", block_count)
-		for block_number in range(0, block_count):
-			block_offset = (block_number * block_size)
-			block_end = block_offset + block_size
-			block_array = bytearray(input_file[block_offset: block_end])
-			xor_block(block_array, block_array, block_number, block_size, file_key)
-			for n in range (0, block_size):
-				input_file[block_offset + n] = block_array[n]
+        #TODO - how to enforce that the firmware aligns to block boundaries?
+        block_count = int(len(input_file) / block_size)
+        print ("Block Count is ", block_count)
+        for block_number in range(0, block_count):
+            block_offset = (block_number * block_size)
+            block_end = block_offset + block_size
+            block_array = bytearray(input_file[block_offset: block_end])
+            xor_block(block_array, block_array, block_number, block_size, file_key)
+            for n in range (0, block_size):
+                input_file[block_offset + n] = block_array[n]
 
-			# update the expected CRC value.
-			xor_crc = calculate_crc(block_array, xor_crc)
+            # update the expected CRC value.
+            xor_crc = calculate_crc(block_array, xor_crc)
 
-		# write CRC
-		output_file.write(struct.pack("<I", xor_crc))
+        # write CRC
+        output_file.write(struct.pack("<I", xor_crc))
 
-		# finally, append the encrypted results.
-		output_file.write(input_file)
-		return
+        # finally, append the encrypted results.
+        output_file.write(input_file)
+        return
 
-	# Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
-	def encrypt(source, target, env):
-		from pathlib import Path
+    # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
+    def encrypt(source, target, env):
+        from pathlib import Path
 
-		fwpath = Path(target[0].path)
-		fwsize = fwpath.stat().st_size
+        fwpath = Path(target[0].path)
+        fwsize = fwpath.stat().st_size
 
-		enname = board.get("build.crypt_chitu")
-		enpath = Path(target[0].dir.path)
+        enname = board.get("build.crypt_chitu")
+        enpath = Path(target[0].dir.path)
 
-		fwfile = fwpath.open("rb")
-		enfile = (enpath / enname).open("wb")
+        fwfile = fwpath.open("rb")
+        enfile = (enpath / enname).open("wb")
 
-		print(f"Encrypting {fwpath} to {enname}")
-		encrypt_file(fwfile, enfile, fwsize)
-		fwfile.close()
-		enfile.close()
-		fwpath.unlink()
+        print(f"Encrypting {fwpath} to {enname}")
+        encrypt_file(fwfile, enfile, fwsize)
+        fwfile.close()
+        enfile.close()
+        fwpath.unlink()
 
-	marlin.relocate_firmware("0x08008800")
-	marlin.add_post_action(encrypt);
+    marlin.relocate_firmware("0x08008800")
+    marlin.add_post_action(encrypt);

+ 8 - 8
buildroot/share/PlatformIO/scripts/common-dependencies-post.py

@@ -4,13 +4,13 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-	Import("env", "projenv")
+    Import("env", "projenv")
 
-	def apply_board_build_flags():
-		if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
-			return
-		projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())
+    def apply_board_build_flags():
+        if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
+            return
+        projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())
 
-	# We need to add the board build flags in a post script
-	# so the platform build script doesn't overwrite the custom CCFLAGS
-	apply_board_build_flags()
+    # We need to add the board build flags in a post script
+    # so the platform build script doesn't overwrite the custom CCFLAGS
+    apply_board_build_flags()

+ 244 - 244
buildroot/share/PlatformIO/scripts/common-dependencies.py

@@ -5,247 +5,247 @@
 import pioutil
 if pioutil.is_pio_build():
 
-	import subprocess,os,re
-	Import("env")
-
-	from platformio.package.meta import PackageSpec
-	from platformio.project.config import ProjectConfig
-
-	verbose = 0
-	FEATURE_CONFIG = {}
-
-	def validate_pio():
-		PIO_VERSION_MIN = (6, 0, 1)
-		try:
-			from platformio import VERSION as PIO_VERSION
-			weights = (1000, 100, 1)
-			version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
-			version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
-			if version_cur < version_min:
-				print()
-				print("**************************************************")
-				print("******      An update to PlatformIO is      ******")
-				print("******  required to build Marlin Firmware.  ******")
-				print("******                                      ******")
-				print("******      Minimum version: ", PIO_VERSION_MIN, "    ******")
-				print("******      Current Version: ", PIO_VERSION, "    ******")
-				print("******                                      ******")
-				print("******   Update PlatformIO and try again.   ******")
-				print("**************************************************")
-				print()
-				exit(1)
-		except SystemExit:
-			exit(1)
-		except:
-			print("Can't detect PlatformIO Version")
-
-	def blab(str,level=1):
-		if verbose >= level:
-			print("[deps] %s" % str)
-
-	def add_to_feat_cnf(feature, flines):
-
-		try:
-			feat = FEATURE_CONFIG[feature]
-		except:
-			FEATURE_CONFIG[feature] = {}
-
-		# Get a reference to the FEATURE_CONFIG under construction
-		feat = FEATURE_CONFIG[feature]
-
-		# Split up passed lines on commas or newlines and iterate
-		# Add common options to the features config under construction
-		# For lib_deps replace a previous instance of the same library
-		atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
-		for line in atoms:
-			parts = line.split('=')
-			name = parts.pop(0)
-			if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
-				feat[name] = '='.join(parts)
-				blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
-			else:
-				for dep in re.split(r',\s*', line):
-					lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
-					lib_re = re.compile('(?!^' + lib_name + '\\b)')
-					feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
-					blab("[%s] lib_deps = %s" % (feature, dep), 3)
-
-	def load_features():
-		blab("========== Gather [features] entries...")
-		for key in ProjectConfig().items('features'):
-			feature = key[0].upper()
-			if not feature in FEATURE_CONFIG:
-				FEATURE_CONFIG[feature] = { 'lib_deps': [] }
-			add_to_feat_cnf(feature, key[1])
-
-		# Add options matching custom_marlin.MY_OPTION to the pile
-		blab("========== Gather custom_marlin entries...")
-		for n in env.GetProjectOptions():
-			key = n[0]
-			mat = re.match(r'custom_marlin\.(.+)', key)
-			if mat:
-				try:
-					val = env.GetProjectOption(key)
-				except:
-					val = None
-				if val:
-					opt = mat[1].upper()
-					blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
-					add_to_feat_cnf(opt, val)
-
-	def get_all_known_libs():
-		known_libs = []
-		for feature in FEATURE_CONFIG:
-			feat = FEATURE_CONFIG[feature]
-			if not 'lib_deps' in feat:
-				continue
-			for dep in feat['lib_deps']:
-				known_libs.append(PackageSpec(dep).name)
-		return known_libs
-
-	def get_all_env_libs():
-		env_libs = []
-		lib_deps = env.GetProjectOption('lib_deps')
-		for dep in lib_deps:
-			env_libs.append(PackageSpec(dep).name)
-		return env_libs
-
-	def set_env_field(field, value):
-		proj = env.GetProjectConfig()
-		proj.set("env:" + env['PIOENV'], field, value)
-
-	# All unused libs should be ignored so that if a library
-	# exists in .pio/lib_deps it will not break compilation.
-	def force_ignore_unused_libs():
-		env_libs = get_all_env_libs()
-		known_libs = get_all_known_libs()
-		diff = (list(set(known_libs) - set(env_libs)))
-		lib_ignore = env.GetProjectOption('lib_ignore') + diff
-		blab("Ignore libraries: %s" % lib_ignore)
-		set_env_field('lib_ignore', lib_ignore)
-
-	def apply_features_config():
-		load_features()
-		blab("========== Apply enabled features...")
-		for feature in FEATURE_CONFIG:
-			if not env.MarlinHas(feature):
-				continue
-
-			feat = FEATURE_CONFIG[feature]
-
-			if 'lib_deps' in feat and len(feat['lib_deps']):
-				blab("========== Adding lib_deps for %s... " % feature, 2)
-
-				# feat to add
-				deps_to_add = {}
-				for dep in feat['lib_deps']:
-					deps_to_add[PackageSpec(dep).name] = dep
-					blab("==================== %s... " % dep, 2)
-
-				# Does the env already have the dependency?
-				deps = env.GetProjectOption('lib_deps')
-				for dep in deps:
-					name = PackageSpec(dep).name
-					if name in deps_to_add:
-						del deps_to_add[name]
-
-				# Are there any libraries that should be ignored?
-				lib_ignore = env.GetProjectOption('lib_ignore')
-				for dep in deps:
-					name = PackageSpec(dep).name
-					if name in deps_to_add:
-						del deps_to_add[name]
-
-				# Is there anything left?
-				if len(deps_to_add) > 0:
-					# Only add the missing dependencies
-					set_env_field('lib_deps', deps + list(deps_to_add.values()))
-
-			if 'build_flags' in feat:
-				f = feat['build_flags']
-				blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
-				new_flags = env.GetProjectOption('build_flags') + [ f ]
-				env.Replace(BUILD_FLAGS=new_flags)
-
-			if 'extra_scripts' in feat:
-				blab("Running extra_scripts for %s... " % feature, 2)
-				env.SConscript(feat['extra_scripts'], exports="env")
-
-			if 'src_filter' in feat:
-				blab("========== Adding build_src_filter for %s... " % feature, 2)
-				src_filter = ' '.join(env.GetProjectOption('src_filter'))
-				# first we need to remove the references to the same folder
-				my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
-				cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
-				for d in my_srcs:
-					if d in cur_srcs:
-						src_filter = re.sub(r'[+-]' + d, '', src_filter)
-
-				src_filter = feat['src_filter'] + ' ' + src_filter
-				set_env_field('build_src_filter', [src_filter])
-				env.Replace(SRC_FILTER=src_filter)
-
-			if 'lib_ignore' in feat:
-				blab("========== Adding lib_ignore for %s... " % feature, 2)
-				lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
-				set_env_field('lib_ignore', lib_ignore)
-
-	#
-	# Use the compiler to get a list of all enabled features
-	#
-	def load_marlin_features():
-		if 'MARLIN_FEATURES' in env:
-			return
-
-		# Process defines
-		from preprocessor import run_preprocessor
-		define_list = run_preprocessor(env)
-		marlin_features = {}
-		for define in define_list:
-			feature = define[8:].strip().decode().split(' ')
-			feature, definition = feature[0], ' '.join(feature[1:])
-			marlin_features[feature] = definition
-		env['MARLIN_FEATURES'] = marlin_features
-
-	#
-	# Return True if a matching feature is enabled
-	#
-	def MarlinHas(env, feature):
-		load_marlin_features()
-		r = re.compile('^' + feature + '$')
-		found = list(filter(r.match, env['MARLIN_FEATURES']))
-
-		# Defines could still be 'false' or '0', so check
-		some_on = False
-		if len(found):
-			for f in found:
-				val = env['MARLIN_FEATURES'][f]
-				if val in [ '', '1', 'true' ]:
-					some_on = True
-				elif val in env['MARLIN_FEATURES']:
-					some_on = env.MarlinHas(val)
-
-		return some_on
-
-	validate_pio()
-
-	try:
-		verbose = int(env.GetProjectOption('custom_verbose'))
-	except:
-		pass
-
-	#
-	# Add a method for other PIO scripts to query enabled features
-	#
-	env.AddMethod(MarlinHas)
-
-	#
-	# Add dependencies for enabled Marlin features
-	#
-	apply_features_config()
-	force_ignore_unused_libs()
-
-	#print(env.Dump())
-
-	from signature import compute_build_signature
-	compute_build_signature(env)
+    import subprocess,os,re
+    Import("env")
+
+    from platformio.package.meta import PackageSpec
+    from platformio.project.config import ProjectConfig
+
+    verbose = 0
+    FEATURE_CONFIG = {}
+
+    def validate_pio():
+        PIO_VERSION_MIN = (6, 0, 1)
+        try:
+            from platformio import VERSION as PIO_VERSION
+            weights = (1000, 100, 1)
+            version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
+            version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
+            if version_cur < version_min:
+                print()
+                print("**************************************************")
+                print("******      An update to PlatformIO is      ******")
+                print("******  required to build Marlin Firmware.  ******")
+                print("******                                      ******")
+                print("******      Minimum version: ", PIO_VERSION_MIN, "    ******")
+                print("******      Current Version: ", PIO_VERSION, "    ******")
+                print("******                                      ******")
+                print("******   Update PlatformIO and try again.   ******")
+                print("**************************************************")
+                print()
+                exit(1)
+        except SystemExit:
+            exit(1)
+        except:
+            print("Can't detect PlatformIO Version")
+
+    def blab(str,level=1):
+        if verbose >= level:
+            print("[deps] %s" % str)
+
+    def add_to_feat_cnf(feature, flines):
+
+        try:
+            feat = FEATURE_CONFIG[feature]
+        except:
+            FEATURE_CONFIG[feature] = {}
+
+        # Get a reference to the FEATURE_CONFIG under construction
+        feat = FEATURE_CONFIG[feature]
+
+        # Split up passed lines on commas or newlines and iterate
+        # Add common options to the features config under construction
+        # For lib_deps replace a previous instance of the same library
+        atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
+        for line in atoms:
+            parts = line.split('=')
+            name = parts.pop(0)
+            if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
+                feat[name] = '='.join(parts)
+                blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
+            else:
+                for dep in re.split(r',\s*', line):
+                    lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
+                    lib_re = re.compile('(?!^' + lib_name + '\\b)')
+                    feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
+                    blab("[%s] lib_deps = %s" % (feature, dep), 3)
+
+    def load_features():
+        blab("========== Gather [features] entries...")
+        for key in ProjectConfig().items('features'):
+            feature = key[0].upper()
+            if not feature in FEATURE_CONFIG:
+                FEATURE_CONFIG[feature] = { 'lib_deps': [] }
+            add_to_feat_cnf(feature, key[1])
+
+        # Add options matching custom_marlin.MY_OPTION to the pile
+        blab("========== Gather custom_marlin entries...")
+        for n in env.GetProjectOptions():
+            key = n[0]
+            mat = re.match(r'custom_marlin\.(.+)', key)
+            if mat:
+                try:
+                    val = env.GetProjectOption(key)
+                except:
+                    val = None
+                if val:
+                    opt = mat[1].upper()
+                    blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
+                    add_to_feat_cnf(opt, val)
+
+    def get_all_known_libs():
+        known_libs = []
+        for feature in FEATURE_CONFIG:
+            feat = FEATURE_CONFIG[feature]
+            if not 'lib_deps' in feat:
+                continue
+            for dep in feat['lib_deps']:
+                known_libs.append(PackageSpec(dep).name)
+        return known_libs
+
+    def get_all_env_libs():
+        env_libs = []
+        lib_deps = env.GetProjectOption('lib_deps')
+        for dep in lib_deps:
+            env_libs.append(PackageSpec(dep).name)
+        return env_libs
+
+    def set_env_field(field, value):
+        proj = env.GetProjectConfig()
+        proj.set("env:" + env['PIOENV'], field, value)
+
+    # All unused libs should be ignored so that if a library
+    # exists in .pio/lib_deps it will not break compilation.
+    def force_ignore_unused_libs():
+        env_libs = get_all_env_libs()
+        known_libs = get_all_known_libs()
+        diff = (list(set(known_libs) - set(env_libs)))
+        lib_ignore = env.GetProjectOption('lib_ignore') + diff
+        blab("Ignore libraries: %s" % lib_ignore)
+        set_env_field('lib_ignore', lib_ignore)
+
+    def apply_features_config():
+        load_features()
+        blab("========== Apply enabled features...")
+        for feature in FEATURE_CONFIG:
+            if not env.MarlinHas(feature):
+                continue
+
+            feat = FEATURE_CONFIG[feature]
+
+            if 'lib_deps' in feat and len(feat['lib_deps']):
+                blab("========== Adding lib_deps for %s... " % feature, 2)
+
+                # feat to add
+                deps_to_add = {}
+                for dep in feat['lib_deps']:
+                    deps_to_add[PackageSpec(dep).name] = dep
+                    blab("==================== %s... " % dep, 2)
+
+                # Does the env already have the dependency?
+                deps = env.GetProjectOption('lib_deps')
+                for dep in deps:
+                    name = PackageSpec(dep).name
+                    if name in deps_to_add:
+                        del deps_to_add[name]
+
+                # Are there any libraries that should be ignored?
+                lib_ignore = env.GetProjectOption('lib_ignore')
+                for dep in lib_ignore:
+                    name = PackageSpec(dep).name
+                    if name in deps_to_add:
+                        del deps_to_add[name]
+
+                # Is there anything left?
+                if len(deps_to_add) > 0:
+                    # Only add the missing dependencies
+                    set_env_field('lib_deps', deps + list(deps_to_add.values()))
+
+            if 'build_flags' in feat:
+                f = feat['build_flags']
+                blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
+                new_flags = env.GetProjectOption('build_flags') + [ f ]
+                env.Replace(BUILD_FLAGS=new_flags)
+
+            if 'extra_scripts' in feat:
+                blab("Running extra_scripts for %s... " % feature, 2)
+                env.SConscript(feat['extra_scripts'], exports="env")
+
+            if 'src_filter' in feat:
+                blab("========== Adding build_src_filter for %s... " % feature, 2)
+                src_filter = ' '.join(env.GetProjectOption('src_filter'))
+                # first we need to remove the references to the same folder
+                my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
+                cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
+                for d in my_srcs:
+                    if d in cur_srcs:
+                        src_filter = re.sub(r'[+-]' + d, '', src_filter)
+
+                src_filter = feat['src_filter'] + ' ' + src_filter
+                set_env_field('build_src_filter', [src_filter])
+                env.Replace(SRC_FILTER=src_filter)
+
+            if 'lib_ignore' in feat:
+                blab("========== Adding lib_ignore for %s... " % feature, 2)
+                lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
+                set_env_field('lib_ignore', lib_ignore)
+
+    #
+    # Use the compiler to get a list of all enabled features
+    #
+    def load_marlin_features():
+        if 'MARLIN_FEATURES' in env:
+            return
+
+        # Process defines
+        from preprocessor import run_preprocessor
+        define_list = run_preprocessor(env)
+        marlin_features = {}
+        for define in define_list:
+            feature = define[8:].strip().decode().split(' ')
+            feature, definition = feature[0], ' '.join(feature[1:])
+            marlin_features[feature] = definition
+        env['MARLIN_FEATURES'] = marlin_features
+
+    #
+    # Return True if a matching feature is enabled
+    #
+    def MarlinHas(env, feature):
+        load_marlin_features()
+        r = re.compile('^' + feature + '$')
+        found = list(filter(r.match, env['MARLIN_FEATURES']))
+
+        # Defines could still be 'false' or '0', so check
+        some_on = False
+        if len(found):
+            for f in found:
+                val = env['MARLIN_FEATURES'][f]
+                if val in [ '', '1', 'true' ]:
+                    some_on = True
+                elif val in env['MARLIN_FEATURES']:
+                    some_on = env.MarlinHas(val)
+
+        return some_on
+
+    validate_pio()
+
+    try:
+        verbose = int(env.GetProjectOption('custom_verbose'))
+    except:
+        pass
+
+    #
+    # Add a method for other PIO scripts to query enabled features
+    #
+    env.AddMethod(MarlinHas)
+
+    #
+    # Add dependencies for enabled Marlin features
+    #
+    apply_features_config()
+    force_ignore_unused_libs()
+
+    #print(env.Dump())
+
+    from signature import compute_build_signature
+    compute_build_signature(env)

+ 194 - 194
buildroot/share/PlatformIO/scripts/configuration.py

@@ -7,229 +7,229 @@ from pathlib import Path
 
 verbose = 0
 def blab(str,level=1):
-	if verbose >= level: print(f"[config] {str}")
+    if verbose >= level: print(f"[config] {str}")
 
 def config_path(cpath):
-	return Path("Marlin", cpath)
+    return Path("Marlin", cpath)
 
 # Apply a single name = on/off ; name = value ; etc.
 # TODO: Limit to the given (optional) configuration
 def apply_opt(name, val, conf=None):
-	if name == "lcd": name, val = val, "on"
-
-	# Create a regex to match the option and capture parts of the line
-	regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)
-
-	# Find and enable and/or update all matches
-	for file in ("Configuration.h", "Configuration_adv.h"):
-		fullpath = config_path(file)
-		lines = fullpath.read_text().split('\n')
-		found = False
-		for i in range(len(lines)):
-			line = lines[i]
-			match = regex.match(line)
-			if match and match[4].upper() == name.upper():
-				found = True
-				# For boolean options un/comment the define
-				if val in ("on", "", None):
-					newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
-				elif val == "off":
-					newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
-				else:
-					# For options with values, enable and set the value
-					newline = match[1] + match[3] + match[4] + match[5] + val
-					if match[8]:
-						sp = match[7] if match[7] else ' '
-						newline += sp + match[8]
-				lines[i] = newline
-				blab(f"Set {name} to {val}")
-
-		# If the option was found, write the modified lines
-		if found:
-			fullpath.write_text('\n'.join(lines))
-			break
-
-	# If the option didn't appear in either config file, add it
-	if not found:
-		# OFF options are added as disabled items so they appear
-		# in config dumps. Useful for custom settings.
-		prefix = ""
-		if val == "off":
-			prefix, val = "//", ""	# Item doesn't appear in config dump
-			#val = "false"			# Item appears in config dump
-
-		# Uppercase the option unless already mixed/uppercase
-		added = name.upper() if name.islower() else name
-
-		# Add the provided value after the name
-		if val != "on" and val != "" and val is not None:
-			added += " " + val
-
-		# Prepend the new option after the first set of #define lines
-		fullpath = config_path("Configuration.h")
-		with fullpath.open() as f:
-			lines = f.readlines()
-			linenum = 0
-			gotdef = False
-			for line in lines:
-				isdef = line.startswith("#define")
-				if not gotdef:
-					gotdef = isdef
-				elif not isdef:
-					break
-				linenum += 1
-			lines.insert(linenum, f"{prefix}#define {added} // Added by config.ini\n")
-			fullpath.write_text('\n'.join(lines))
+    if name == "lcd": name, val = val, "on"
+
+    # Create a regex to match the option and capture parts of the line
+    regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)
+
+    # Find and enable and/or update all matches
+    for file in ("Configuration.h", "Configuration_adv.h"):
+        fullpath = config_path(file)
+        lines = fullpath.read_text().split('\n')
+        found = False
+        for i in range(len(lines)):
+            line = lines[i]
+            match = regex.match(line)
+            if match and match[4].upper() == name.upper():
+                found = True
+                # For boolean options un/comment the define
+                if val in ("on", "", None):
+                    newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
+                elif val == "off":
+                    newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
+                else:
+                    # For options with values, enable and set the value
+                    newline = match[1] + match[3] + match[4] + match[5] + val
+                    if match[8]:
+                        sp = match[7] if match[7] else ' '
+                        newline += sp + match[8]
+                lines[i] = newline
+                blab(f"Set {name} to {val}")
+
+        # If the option was found, write the modified lines
+        if found:
+            fullpath.write_text('\n'.join(lines))
+            break
+
+    # If the option didn't appear in either config file, add it
+    if not found:
+        # OFF options are added as disabled items so they appear
+        # in config dumps. Useful for custom settings.
+        prefix = ""
+        if val == "off":
+            prefix, val = "//", ""  # Item doesn't appear in config dump
+            #val = "false"          # Item appears in config dump
+
+        # Uppercase the option unless already mixed/uppercase
+        added = name.upper() if name.islower() else name
+
+        # Add the provided value after the name
+        if val != "on" and val != "" and val is not None:
+            added += " " + val
+
+        # Prepend the new option after the first set of #define lines
+        fullpath = config_path("Configuration.h")
+        with fullpath.open() as f:
+            lines = f.readlines()
+            linenum = 0
+            gotdef = False
+            for line in lines:
+                isdef = line.startswith("#define")
+                if not gotdef:
+                    gotdef = isdef
+                elif not isdef:
+                    break
+                linenum += 1
+            lines.insert(linenum, f"{prefix}#define {added} // Added by config.ini\n")
+            fullpath.write_text(''.join(lines))
 
 # Fetch configuration files from GitHub given the path.
 # Return True if any files were fetched.
 def fetch_example(url):
-	if url.endswith("/"): url = url[:-1]
-	if url.startswith('http'):
-		url = url.replace("%", "%25").replace(" ", "%20")
-	else:
-		brch = "bugfix-2.1.x"
-		if '@' in path: path, brch = map(str.strip, path.split('@'))
-		url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"
-
-	# Find a suitable fetch command
-	if shutil.which("curl") is not None:
-		fetch = "curl -L -s -S -f -o"
-	elif shutil.which("wget") is not None:
-		fetch = "wget -q -O"
-	else:
-		blab("Couldn't find curl or wget", -1)
-		return False
-
-	import os
-
-	# Reset configurations to default
-	os.system("git reset --hard HEAD")
-
-	# Try to fetch the remote files
-	gotfile = False
-	for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
-		if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
-			shutil.move('wgot', config_path(fn))
-			gotfile = True
-
-	if Path('wgot').exists(): shutil.rmtree('wgot')
-
-	return gotfile
+    if url.endswith("/"): url = url[:-1]
+    if url.startswith('http'):
+        url = url.replace("%", "%25").replace(" ", "%20")
+    else:
+        brch = "bugfix-2.1.x"
+        if '@' in url: url, brch = map(str.strip, url.split('@'))
+        url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"
+
+    # Find a suitable fetch command
+    if shutil.which("curl") is not None:
+        fetch = "curl -L -s -S -f -o"
+    elif shutil.which("wget") is not None:
+        fetch = "wget -q -O"
+    else:
+        blab("Couldn't find curl or wget", -1)
+        return False
+
+    import os
+
+    # Reset configurations to default
+    os.system("git reset --hard HEAD")
+
+    # Try to fetch the remote files
+    gotfile = False
+    for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
+        if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
+            shutil.move('wgot', config_path(fn))
+            gotfile = True
+
+    if Path('wgot').exists(): Path('wgot').unlink()
+
+    return gotfile
 
 def section_items(cp, sectkey):
-	return cp.items(sectkey) if sectkey in cp.sections() else []
+    return cp.items(sectkey) if sectkey in cp.sections() else []
 
 # Apply all items from a config section
 def apply_ini_by_name(cp, sect):
-	iniok = True
-	if sect in ('config:base', 'config:root'):
-		iniok = False
-		items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
-	else:
-		items = cp.items(sect)
+    iniok = True
+    if sect in ('config:base', 'config:root'):
+        iniok = False
+        items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
+    else:
+        items = cp.items(sect)
 
-	for item in items:
-		if iniok or not item[0].startswith('ini_'):
-			apply_opt(item[0], item[1])
+    for item in items:
+        if iniok or not item[0].startswith('ini_'):
+            apply_opt(item[0], item[1])
 
 # Apply all config sections from a parsed file
 def apply_all_sections(cp):
-	for sect in cp.sections():
-		if sect.startswith('config:'):
-			apply_ini_by_name(cp, sect)
+    for sect in cp.sections():
+        if sect.startswith('config:'):
+            apply_ini_by_name(cp, sect)
 
 # Apply certain config sections from a parsed file
 def apply_sections(cp, ckey='all'):
-	blab(f"Apply section key: {ckey}")
-	if ckey == 'all':
-		apply_all_sections(cp)
-	else:
-		# Apply the base/root config.ini settings after external files are done
-		if ckey in ('base', 'root'):
-			apply_ini_by_name(cp, 'config:base')
-
-		# Apply historically 'Configuration.h' settings everywhere
-		if ckey == 'basic':
-			apply_ini_by_name(cp, 'config:basic')
-
-		# Apply historically Configuration_adv.h settings everywhere
-		# (Some of which rely on defines in 'Conditionals_LCD.h')
-		elif ckey in ('adv', 'advanced'):
-			apply_ini_by_name(cp, 'config:advanced')
-
-		# Apply a specific config:<name> section directly
-		elif ckey.startswith('config:'):
-			apply_ini_by_name(cp, ckey)
+    blab(f"Apply section key: {ckey}")
+    if ckey == 'all':
+        apply_all_sections(cp)
+    else:
+        # Apply the base/root config.ini settings after external files are done
+        if ckey in ('base', 'root'):
+            apply_ini_by_name(cp, 'config:base')
+
+        # Apply historically 'Configuration.h' settings everywhere
+        if ckey == 'basic':
+            apply_ini_by_name(cp, 'config:basic')
+
+        # Apply historically Configuration_adv.h settings everywhere
+        # (Some of which rely on defines in 'Conditionals_LCD.h')
+        elif ckey in ('adv', 'advanced'):
+            apply_ini_by_name(cp, 'config:advanced')
+
+        # Apply a specific config:<name> section directly
+        elif ckey.startswith('config:'):
+            apply_ini_by_name(cp, ckey)
 
 # Apply settings from a top level config.ini
 def apply_config_ini(cp):
-	blab("=" * 20 + " Gather 'config.ini' entries...")
-
-	# Pre-scan for ini_use_config to get config_keys
-	base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
-	config_keys = ['base']
-	for ikey, ival in base_items:
-		if ikey == 'ini_use_config':
-			config_keys = map(str.strip, ival.split(','))
-
-	# For each ini_use_config item perform an action
-	for ckey in config_keys:
-		addbase = False
-
-		# For a key ending in .ini load and parse another .ini file
-		if ckey.endswith('.ini'):
-			sect = 'base'
-			if '@' in ckey: sect, ckey = ckey.split('@')
-			other_ini = configparser.ConfigParser()
-			other_ini.read(config_path(ckey))
-			apply_sections(other_ini, sect)
-
-		# (Allow 'example/' as a shortcut for 'examples/')
-		elif ckey.startswith('example/'):
-			ckey = 'examples' + ckey[7:]
-
-		# For 'examples/<path>' fetch an example set from GitHub.
-		# For https?:// do a direct fetch of the URL.
-		elif ckey.startswith('examples/') or ckey.startswith('http'):
-			fetch_example(ckey)
-			ckey = 'base'
-
-		# Apply keyed sections after external files are done
-		apply_sections(cp, 'config:' + ckey)
+    blab("=" * 20 + " Gather 'config.ini' entries...")
+
+    # Pre-scan for ini_use_config to get config_keys
+    base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
+    config_keys = ['base']
+    for ikey, ival in base_items:
+        if ikey == 'ini_use_config':
+            config_keys = map(str.strip, ival.split(','))
+
+    # For each ini_use_config item perform an action
+    for ckey in config_keys:
+        addbase = False
+
+        # For a key ending in .ini load and parse another .ini file
+        if ckey.endswith('.ini'):
+            sect = 'base'
+            if '@' in ckey: sect, ckey = ckey.split('@')
+            other_ini = configparser.ConfigParser()
+            other_ini.read(config_path(ckey))
+            apply_sections(other_ini, sect)
+
+        # (Allow 'example/' as a shortcut for 'examples/')
+        elif ckey.startswith('example/'):
+            ckey = 'examples' + ckey[7:]
+
+        # For 'examples/<path>' fetch an example set from GitHub.
+        # For https?:// do a direct fetch of the URL.
+        elif ckey.startswith('examples/') or ckey.startswith('http'):
+            fetch_example(ckey)
+            ckey = 'base'
+
+        # Apply keyed sections after external files are done
+        apply_sections(cp, 'config:' + ckey)
 
 if __name__ == "__main__":
-	#
-	# From command line use the given file name
-	#
-	import sys
-	args = sys.argv[1:]
-	if len(args) > 0:
-		if args[0].endswith('.ini'):
-			ini_file = args[0]
-		else:
-			print("Usage: %s <.ini file>" % sys.argv[0])
-	else:
-		ini_file = config_path('config.ini')
-
-	if ini_file:
-		user_ini = configparser.ConfigParser()
-		user_ini.read(ini_file)
-		apply_config_ini(user_ini)
+    #
+    # From command line use the given file name
+    #
+    import sys
+    args = sys.argv[1:]
+    if len(args) > 0:
+        if args[0].endswith('.ini'):
+            ini_file = args[0]
+        else:
+            print("Usage: %s <.ini file>" % sys.argv[0])
+    else:
+        ini_file = config_path('config.ini')
+
+    if ini_file:
+        user_ini = configparser.ConfigParser()
+        user_ini.read(ini_file)
+        apply_config_ini(user_ini)
 
 else:
-	#
-	# From within PlatformIO use the loaded INI file
-	#
-	import pioutil
-	if pioutil.is_pio_build():
+    #
+    # From within PlatformIO use the loaded INI file
+    #
+    import pioutil
+    if pioutil.is_pio_build():
 
-		Import("env")
+        Import("env")
 
-		try:
-			verbose = int(env.GetProjectOption('custom_verbose'))
-		except:
-			pass
+        try:
+            verbose = int(env.GetProjectOption('custom_verbose'))
+        except:
+            pass
 
-		from platformio.project.config import ProjectConfig
-		apply_config_ini(ProjectConfig())
+        from platformio.project.config import ProjectConfig
+        apply_config_ini(ProjectConfig())

+ 8 - 8
buildroot/share/PlatformIO/scripts/custom_board.py

@@ -6,13 +6,13 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-	import marlin
-	board = marlin.env.BoardConfig()
+    import marlin
+    board = marlin.env.BoardConfig()
 
-	address = board.get("build.address", "")
-	if address:
-		marlin.relocate_firmware(address)
+    address = board.get("build.address", "")
+    if address:
+        marlin.relocate_firmware(address)
 
-	ldscript = board.get("build.ldscript", "")
-	if ldscript:
-		marlin.custom_ld_script(ldscript)
+    ldscript = board.get("build.ldscript", "")
+    if ldscript:
+        marlin.custom_ld_script(ldscript)

+ 42 - 42
buildroot/share/PlatformIO/scripts/download_mks_assets.py

@@ -4,50 +4,50 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-	Import("env")
-	import requests,zipfile,tempfile,shutil
-	from pathlib import Path
+    Import("env")
+    import requests,zipfile,tempfile,shutil
+    from pathlib import Path
 
-	url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
-	deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
-	zip_path = deps_path / "mks-assets.zip"
-	assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
+    url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
+    deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
+    zip_path = deps_path / "mks-assets.zip"
+    assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
 
-	def download_mks_assets():
-		print("Downloading MKS Assets")
-		r = requests.get(url, stream=True)
-		# the user may have a very clean workspace,
-		# so create the PROJECT_LIBDEPS_DIR directory if not exits
-		if not deps_path.exists():
-			deps_path.mkdir()
-		with zip_path.open('wb') as fd:
-			for chunk in r.iter_content(chunk_size=128):
-				fd.write(chunk)
+    def download_mks_assets():
+        print("Downloading MKS Assets")
+        r = requests.get(url, stream=True)
+        # the user may have a very clean workspace,
+        # so create the PROJECT_LIBDEPS_DIR directory if not exits
+        if not deps_path.exists():
+            deps_path.mkdir()
+        with zip_path.open('wb') as fd:
+            for chunk in r.iter_content(chunk_size=128):
+                fd.write(chunk)
 
-	def copy_mks_assets():
-		print("Copying MKS Assets")
-		output_path = Path(tempfile.mkdtemp())
-		zip_obj = zipfile.ZipFile(zip_path, 'r')
-		zip_obj.extractall(output_path)
-		zip_obj.close()
-		if assets_path.exists() and not assets_path.is_dir():
-			assets_path.unlink()
-		if not assets_path.exists():
-			assets_path.mkdir()
-		base_path = ''
-		for filename in output_path.iterdir():
-			base_path = filename
-		fw_path = (output_path / base_path / 'Firmware')
-		font_path = fw_path / 'mks_font'
-		for filename in font_path.iterdir():
-			shutil.copy(font_path / filename, assets_path)
-		pic_path = fw_path / 'mks_pic'
-		for filename in pic_path.iterdir():
-			shutil.copy(pic_path / filename, assets_path)
-		shutil.rmtree(output_path, ignore_errors=True)
+    def copy_mks_assets():
+        print("Copying MKS Assets")
+        output_path = Path(tempfile.mkdtemp())
+        zip_obj = zipfile.ZipFile(zip_path, 'r')
+        zip_obj.extractall(output_path)
+        zip_obj.close()
+        if assets_path.exists() and not assets_path.is_dir():
+            assets_path.unlink()
+        if not assets_path.exists():
+            assets_path.mkdir()
+        base_path = ''
+        for filename in output_path.iterdir():
+            base_path = filename
+        fw_path = (output_path / base_path / 'Firmware')
+        font_path = fw_path / 'mks_font'
+        for filename in font_path.iterdir():
+            shutil.copy(font_path / filename, assets_path)
+        pic_path = fw_path / 'mks_pic'
+        for filename in pic_path.iterdir():
+            shutil.copy(pic_path / filename, assets_path)
+        shutil.rmtree(output_path, ignore_errors=True)
 
-	if not zip_path.exists():
-		download_mks_assets()
+    if not zip_path.exists():
+        download_mks_assets()
 
-	if not assets_path.exists():
-		copy_mks_assets()
+    if not assets_path.exists():
+        copy_mks_assets()

Some files were not shown because too many files changed in this diff