commit 0dc1a58b24 (parent f2ca70e232)

--- /dev/null
+++ b/buildroot/share/PlatformIO/scripts/mc-apply.py
@@ -0,0 +1,69 @@
#!/usr/bin/env python
#
# Create a Configuration from marlin_config.json
#
import json
import sys
import shutil

opt_output = '--opt' in sys.argv
output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'

try:
    with open('marlin_config.json', 'r') as infile:
        conf = json.load(infile)
        for key in conf:
            # We don't care about the hash when restoring here
            if key == '__INITIAL_HASH':
                continue
            if key == 'VERSION':
                for k, v in sorted(conf[key].items()):
                    print(k + ': ' + v)
                continue
            # The key is the file name, so let's build it now
            outfile = open('Marlin/' + key + output_suffix, 'w')
            for k, v in sorted(conf[key].items()):
                # Make the define line now
                if opt_output:
                    if v != '':
                        if '"' in v:
                            v = "'%s'" % v
                        elif ' ' in v:
                            v = '"%s"' % v
                        define = 'opt_set ' + k + ' ' + v + '\n'
                    else:
                        define = 'opt_enable ' + k + '\n'
                else:
                    define = '#define ' + k + ' ' + v + '\n'
                outfile.write(define)
            outfile.close()

            # Try to apply the changes to the actual configuration file (in order to keep the useful comments)
            if output_suffix != '':
                # Move the existing configuration aside so it doesn't interfere
                shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
                infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
                outfile = open('Marlin/' + key, 'w')
                for line in infile_lines:
                    sline = line.strip(" \t\n\r")
                    if sline[:7] == "#define":
                        # Extract the key here (we don't care about the value)
                        kv = sline[8:].strip().split(' ')
                        if kv[0] in conf[key]:
                            outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
                            # Remove the key from the dict, so all missing keys can still be written at the end of the file
                            del conf[key][kv[0]]
                        else:
                            outfile.write(line + '\n')
                    else:
                        outfile.write(line + '\n')
                # Process any remaining defines here
                for k, v in sorted(conf[key].items()):
                    define = '#define ' + k + ' ' + v + '\n'
                    outfile.write(define)
                outfile.close()

            print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
except:
    print('No marlin_config.json found.')
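For illustration, here is a minimal sketch of the input this script consumes and what it produces. The key/value data is hypothetical; only the structure matches what signature.py (below) writes.

```python
import json

# A tiny, hypothetical marlin_config.json: one config file with one
# valueless option (an enabled flag) and one option with a value.
sample = {
    '__INITIAL_HASH': 'deadbeef01cafebabe02',
    'VERSION': {'DETAILED_BUILD_VERSION': '2.0.9.3'},
    'Configuration.h': {'PIDTEMP': '', 'X_BED_SIZE': '200'},
}
with open('marlin_config.json', 'w') as f:
    json.dump(sample, f, separators=(',', ':'))

# Running `python mc-apply.py --opt` in a Marlin checkout should write
# Marlin/Configuration.h.sh containing:
#   opt_enable PIDTEMP
#   opt_set X_BED_SIZE 200
# (it will also merge the values back into Marlin/Configuration.h if present)
```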
--- /dev/null
+++ b/buildroot/share/PlatformIO/scripts/preprocessor.py
@@ -0,0 +1,99 @@
#
# preprocessor.py
#
import subprocess, os, re

verbose = 0

def blab(msg):
    if verbose:
        print(msg)

################################################################################
#
# Invoke GCC to run the preprocessor and extract enabled features
#
preprocessor_cache = {}
def run_preprocessor(env, fn=None):
    filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
    if filename in preprocessor_cache:
        return preprocessor_cache[filename]

    # Process defines
    build_flags = env.get('BUILD_FLAGS')
    build_flags = env.ParseFlagsExtended(build_flags)

    cxx = search_compiler(env)
    cmd = ['"' + cxx + '"']

    # Build flags from board.json
    #if 'BOARD' in env:
    #    cmd += [env.BoardConfig().get("build.extra_flags")]
    for s in build_flags['CPPDEFINES']:
        if isinstance(s, tuple):
            cmd += ['-D' + s[0] + '=' + str(s[1])]
        else:
            cmd += ['-D' + s]

    # -dM dumps all effective #defines, -E stops after preprocessing, -w silences warnings
    cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
    depcmd = cmd + [filename]
    cmd = ' '.join(depcmd)
    blab(cmd)
    define_list = subprocess.check_output(cmd, shell=True).splitlines()
    preprocessor_cache[filename] = define_list
    return define_list
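The returned list holds raw `#define NAME VALUE` byte strings, one per macro that survives preprocessing. A small sketch of how a consumer (like signature.py, below) splits them, using made-up lines:

```python
# Hypothetical sample of what `g++ -dM -E` emits, in the form returned by
# run_preprocessor() (bytes, since check_output returns bytes).
sample_output = [
    b'#define SERIAL_PORT 0',
    b'#define PIDTEMP ',
]
for line in sample_output:
    key_val = line[8:].strip().decode().split(' ')   # drop the '#define ' prefix
    key, value = key_val[0], ' '.join(key_val[1:])
    print(key, '=', repr(value))                     # SERIAL_PORT = '0', PIDTEMP = ''
```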

################################################################################
#
# Find a compiler, considering the OS
#
def search_compiler(env):

    ENV_BUILD_PATH = os.path.join(env.Dictionary('PROJECT_BUILD_DIR'), env['PIOENV'])
    GCC_PATH_CACHE = os.path.join(ENV_BUILD_PATH, ".gcc_path")

    try:
        filepath = env.GetProjectOption('custom_gcc')
        blab("Getting compiler from env")
        return filepath
    except:
        pass

    if os.path.exists(GCC_PATH_CACHE):
        blab("Getting g++ path from cache")
        with open(GCC_PATH_CACHE, 'r') as f:
            return f.read()

    # Find the current platform compiler by searching the $PATH,
    # which will be in a platformio toolchain bin folder
    path_regex = re.escape(env['PROJECT_PACKAGES_DIR'])
    gcc = "g++"
    if env['PLATFORM'] == 'win32':
        path_separator = ';'
        path_regex += r'.*\\bin'
        gcc += ".exe"
    else:
        path_separator = ':'
        path_regex += r'/.+/bin'

    # Search for the compiler
    for pathdir in env['ENV']['PATH'].split(path_separator):
        if not re.search(path_regex, pathdir, re.IGNORECASE):
            continue
        for filepath in os.listdir(pathdir):
            if not filepath.endswith(gcc):
                continue
            # Use the full path so we don't rely on the env PATH
            filepath = os.path.sep.join([pathdir, filepath])
            # Cache the g++ path so we don't have to search every time
            if os.path.exists(ENV_BUILD_PATH):
                blab("Caching g++ for current env")
                with open(GCC_PATH_CACHE, 'w+') as f:
                    f.write(filepath)

            return filepath

    filepath = env.get('CXX')
    blab("Couldn't find a compiler! Falling back to %s" % filepath)
    return filepath
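To make the PATH filtering concrete, here is a standalone sketch of the same regex logic with made-up directories (both paths are hypothetical):

```python
import re

packages_dir = '/home/user/.platformio/packages'      # hypothetical PROJECT_PACKAGES_DIR
path_regex = re.escape(packages_dir) + r'/.+/bin'     # the POSIX branch of search_compiler()

# Only the toolchain's bin folder should match, not system directories.
for pathdir in ['/usr/bin', packages_dir + '/toolchain-gccarmnoneeabi/bin']:
    print(pathdir, '->', bool(re.search(path_regex, pathdir, re.IGNORECASE)))
```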
--- /dev/null
+++ b/buildroot/share/PlatformIO/scripts/signature.py
@@ -0,0 +1,176 @@
#
# signature.py
#
import os, subprocess, re, json, hashlib

#
# The dumbest preprocessor in the world.
# Extract macro names from a header file and store them in an array.
# No processing is done here, so these are raw values that don't match what's actually
# enabled in the file (since you can have #if SOMETHING_UNDEFINED / #define BOB / #endif).
# Still, it's enough to filter the useful macros spit out by the preprocessor from the
# noise of the system headers.
#
def extract_defines(filepath):
    f = open(filepath, encoding="utf8").read().split("\n")
    a = []
    for line in f:
        sline = line.strip(" \t\n\r")
        if sline[:7] == "#define":
            # Extract the key here (we don't care about the value)
            kv = sline[8:].strip().split(' ')
            a.append(kv[0])
    return a
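A quick sanity check of extract_defines() against a throwaway header (the file contents are hypothetical):

```python
# Write a tiny header, then confirm both macro names come back, including
# the one inside a never-taken #if block (no evaluation happens here).
sample = '#define ALPHA 1\n#if SOMETHING_UNDEFINED\n#define BOB\n#endif\n'
with open('/tmp/sample.h', 'w') as f:
    f.write(sample)
print(extract_defines('/tmp/sample.h'))   # ['ALPHA', 'BOB']
```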

# Compute the SHA256 hash of a file
def get_file_sha256sum(filepath):
    sha256_hash = hashlib.sha256()
    with open(filepath, "rb") as f:
        # Read and update the hash in blocks of 4K
        for byte_block in iter(lambda: f.read(4096), b""):
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()

#
# Compress a JSON file into a zip file
#
import zipfile
def compress_file(filepath, outputbase):
    with zipfile.ZipFile(outputbase + '.zip', 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
        zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
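As a quick illustration of the helper above (file paths hypothetical), compressing a repetitive JSON payload should show a large size reduction:

```python
import json, os

# Write some JSON, compress it, and compare sizes; with BZIP2 at level 9
# a repetitive payload like this should shrink substantially.
with open('/tmp/demo.json', 'w') as f:
    json.dump({'KEY_%d' % i: 'value' for i in range(500)}, f)
compress_file('/tmp/demo.json', '/tmp/demo')
print(os.path.getsize('/tmp/demo.json'), '->', os.path.getsize('/tmp/demo.zip'))
```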

#
# Compute the build signature. The idea is to extract all the defines from the configuration
# headers and build a unique, reversible signature for this build, so it can be included in
# the binary. The signature can be reversed to get a 1:1 equivalent configuration file.
#
def compute_build_signature(env):
    if 'BUILD_SIGNATURE' in env:
        return

    # Definitions from these files will be kept
    files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]

    build_dir = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV'])

    # Check if we can skip processing
    hashes = ''
    for header in files_to_keep:
        hashes += get_file_sha256sum(header)[0:10]

    marlin_json = os.path.join(build_dir, 'marlin_config.json')
    marlin_zip = os.path.join(build_dir, 'mc')

    # Read the existing config file
    try:
        with open(marlin_json, 'r') as infile:
            conf = json.load(infile)
            if conf['__INITIAL_HASH'] == hashes:
                # Same configuration, skip recomputing the build signature
                compress_file(marlin_json, marlin_zip)
                return
    except:
        pass

    # Get enabled config options based on the preprocessor
    from preprocessor import run_preprocessor
    complete_cfg = run_preprocessor(env)

    # Dumb #define extraction from the configuration files
    real_defines = {}
    all_defines = []
    for header in files_to_keep:
        defines = extract_defines(header)
        # To filter for only the defines we want
        all_defines += defines
        # To remember which file each define came from
        real_defines[header.split('/')[-1]] = defines

    r = re.compile(r"\(+(\s*-*\s*_.*)\)+")

    # First step is to collect all valid macros
    defines = {}
    for line in complete_cfg:

        # Split the define from the value
        key_val = line[8:].strip().decode().split(' ')
        key, value = key_val[0], ' '.join(key_val[1:])

        # Ignore keys starting with two underscores, since they're low-level
        if len(key) > 2 and key[0:2] == "__":
            continue
        # Ignore keys containing parentheses (likely a function macro)
        if '(' in key and ')' in key:
            continue

        # Then filter dumb values
        if r.match(value):
            continue

        defines[key] = value if len(value) else ""

    if 'CONFIGURATION_EMBEDDING' not in defines:
        return

    # Second step is to filter out useless macros
    resolved_defines = {}
    for key in defines:
        # Remove all board names
        if key[0:6] == "BOARD_" and key != "BOARD_INFO_NAME":
            continue
        # Remove all keys ending in "_NAME", since they make no difference to the configuration
        if key[-5:] == "_NAME" and key != "CUSTOM_MACHINE_NAME":
            continue
        # Remove all keys ending in "_T_DECLARED", since they're just copies of unimportant system stuff
        if key[-11:] == "_T_DECLARED":
            continue
        # Remove keys that are not in the #define lists of the Configuration files
        if key not in all_defines and key != "DETAILED_BUILD_VERSION" and key != "STRING_DISTRIBUTION_DATE":
            continue

        # Don't be that smart guy here
        resolved_defines[key] = defines[key]

    # Generate a build signature now.
    # We are making an object that's a bit more complex than a basic dictionary here
    data = {}
    data['__INITIAL_HASH'] = hashes
    # First create a key for each header here
    for header in real_defines:
        data[header] = {}

    # Then populate each header's object with its resolved defines (an O(N^2) algorithm here...)
    for key in resolved_defines:
        for header in real_defines:
            if key in real_defines[header]:
                data[header][key] = resolved_defines[key]

    # Append the source code version and date
    data['VERSION'] = {}
    data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
    data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
    try:
        curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
        data['VERSION']['GIT_REF'] = curver.decode()
    except:
        pass

    with open(marlin_json, 'w') as outfile:
        json.dump(data, outfile, separators=(',', ':'))

    # Compress the JSON file as much as we can
    compress_file(marlin_json, marlin_zip)

    # Generate a C header that stores the archive as a byte array
    with open('Marlin/src/mczip.h', 'wb') as result_file:
        result_file.write(b'#warning "Generated file \'mc.zip\' is embedded"\n')
        result_file.write(b'const unsigned char mc_zip[] PROGMEM = {\n ')
        count = 0
        for b in open(os.path.join(build_dir, 'mc.zip'), 'rb').read():
            result_file.write(b' 0x%02X,' % b)
            count += 1
            if count % 16 == 0:
                result_file.write(b'\n ')
        if count % 16:
            result_file.write(b'\n')
        result_file.write(b'};\n')
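Since the signature is designed to be reversible, a round-trip check can confirm nothing is lost in the compression step. A minimal sketch, assuming a hypothetical PlatformIO environment name:

```python
import json, zipfile

# Unzip mc.zip from the build folder and parse the JSON back out;
# the result should equal what compute_build_signature() serialized.
with zipfile.ZipFile('.pio/build/mega2560/mc.zip') as zf:   # hypothetical env name
    with zf.open(zf.namelist()[0]) as f:
        conf = json.load(f)
print(conf['VERSION'])
```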
@@ -0,0 +1,19 @@
# Configuration Embedding

Starting with version 2.0.9.3, Marlin automatically extracts the configuration used to generate the firmware and stores it in the firmware binary. This is enabled by defining `CONFIGURATION_EMBEDDING` in `Configuration_adv.h`.

## How it's done

To create the embedded configuration, we do a compiler pass to process the Configuration files and extract all active options. The active options are parsed into key/value pairs, serialized to JSON, and stored in a file called `marlin_config.json`, which also includes specific build information (such as the git revision, the build date, and some version information). The JSON file is then compressed into a ZIP archive, `mc.zip`, in the environment build folder (e.g., `.pio/build/<env>/mc.zip`), which is converted into a C array and stored in a header called `mczip.h`, which is included in the build.
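If you want to confirm the configuration actually made it into a build, one quick check is to look for the ZIP local-file-header magic in the firmware image. A minimal sketch, assuming a hypothetical firmware path:

```python
# 'PK\x03\x04' is the standard ZIP local-file-header signature; finding it
# in the image strongly suggests the embedded mc.zip is present.
data = open('.pio/build/mega2560/firmware.bin', 'rb').read()   # hypothetical path
print('Embedded zip found' if b'PK\x03\x04' in data else 'Not found')
```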

## Extracting configurations from a Marlin binary

To get the configuration out of a binary firmware, you'll need a non-write-protected SD card inserted into the printer while running the firmware.
Send the command `M503 C` to write the file `mc.zip` to the SD card. Copy the file to your computer, ideally into the root of the Marlin repository, since the commands below are run from there.

Run the following commands to extract and apply the configuration:
```
$ git checkout -f
$ unzip mc.zip
$ python buildroot/share/PlatformIO/scripts/mc-apply.py
```

This will attempt to update the configuration files to match the settings used for the original build, and will print the git reference used to build the code (which may be accessible if the firmware was built from the main repository). As a fallback it also prints the `STRING_DISTRIBUTION_DATE`, which is unlikely to be modified in a fork. By default `mc-apply.py` writes `.gen` copies and also merges the recovered settings into the existing configuration files to preserve their comments; pass `--opt` to emit a shell script of `opt_set`/`opt_enable` lines instead, or `--bare-output` to overwrite the configuration files with a bare `#define` dump.