Mirror of https://github.com/davidgiven/fluxengine.git, synced 2025-10-24 11:11:02 -07:00

Compare commits

38 Commits

| SHA1 |
|---|
| 7cde8e3aa6 |
| 34fe6f0a5f |
| 76c9674f3f |
| addbabd123 |
| 46b90d9c36 |
| 7ee67082aa |
| e8042ed5f3 |
| 8828874c25 |
| 1bdb093319 |
| a1e2191ad5 |
| e61fcf1d9b |
| 610ef0dc4b |
| 273d38f237 |
| 8194a08382 |
| 6170b704b1 |
| b05f5e7caa |
| 4b38fc6044 |
| cee16a75ca |
| 9fd85a8289 |
| 2f1eff1474 |
| 8c582b8d72 |
| e49673329d |
| 07ebed83bf |
| 1def87fdc3 |
| d91fed7dd4 |
| 5f2f7e70ef |
| 83432beff6 |
| 979b550178 |
| 9062a531f3 |
| e2a6fbcf3c |
| ec16931f3a |
| 0ec0ca7495 |
| 51fa7c9371 |
| 6c69f10fe7 |
| 206e85a356 |
| 8d7dd4867b |
| d1524f78fb |
| b26735d520 |
							
								
								
									
.github/workflows/ccpp.yml: 6 lines changed (vendored)

							| @@ -70,7 +70,9 @@ jobs: | ||||
|       uses: actions/upload-artifact@v4 | ||||
|       with: | ||||
|         name: ${{ github.event.repository.name }}.${{ github.sha }}.fluxengine.${{ runner.arch }}.pkg | ||||
|         path: fluxengine/FluxEngine.pkg | ||||
|         path: | | ||||
|           fluxengine/FluxEngine.pkg | ||||
|           fluxengine/FluxEngine.app.zip | ||||
|  | ||||
|   build-windows: | ||||
|     runs-on: windows-latest | ||||
| @@ -86,7 +88,7 @@ jobs: | ||||
|         wsl --import fedora fedora install.tar.gz | ||||
|         wsl --set-default fedora | ||||
|         wsl sh -c 'dnf -y install https://github.com/rpmsphere/noarch/raw/master/r/rpmsphere-release-40-1.noarch.rpm' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico ninja-build' | ||||
|  | ||||
|     - name: fix line endings | ||||
|       run: | | ||||
|   | ||||
							
								
								
									
.github/workflows/release.yml: 5 lines changed (vendored)

							| @@ -24,7 +24,7 @@ jobs: | ||||
|         wsl --import fedora fedora install.tar.gz | ||||
|         wsl --set-default fedora | ||||
|         wsl sh -c 'dnf -y install https://github.com/rpmsphere/noarch/raw/master/r/rpmsphere-release-40-1.noarch.rpm' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico ninja-build' | ||||
|  | ||||
|     - name: fix line endings | ||||
|       run: | | ||||
| @@ -99,6 +99,7 @@ jobs: | ||||
|       run: | | ||||
|         gmake | ||||
|         mv FluxEngine.pkg FluxEngine-${{ runner.arch }}.pkg | ||||
|         mv FluxEngine.app.zip FluxEngine-${{ runner.arch }}.app.zip | ||||
|  | ||||
|     - name: tag | ||||
|       uses: EndBug/latest-tag@latest | ||||
| @@ -115,6 +116,7 @@ jobs: | ||||
|         tag: dev | ||||
|         assets: |  | ||||
|           FluxEngine-${{ runner.arch }}.pkg | ||||
|           FluxEngine-${{ runner.arch }}.app.zip | ||||
|         fail-if-no-assets: false | ||||
|  | ||||
|     - name: release | ||||
| @@ -123,6 +125,7 @@ jobs: | ||||
|         name: Development build ${{ env.RELEASE_DATE }} | ||||
|         files: | | ||||
|           FluxEngine-${{ runner.arch }}.pkg | ||||
|           FluxEngine-${{ runner.arch }}.app.zip | ||||
|         tag_name: dev | ||||
|       env: | ||||
|         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
|   | ||||
							
								
								
									
Makefile: 9 lines changed

							| @@ -12,12 +12,15 @@ ifeq ($(BUILDTYPE),windows) | ||||
| 	MINGW = i686-w64-mingw32- | ||||
| 	CC = $(MINGW)gcc | ||||
| 	CXX = $(MINGW)g++ -std=c++20 | ||||
| 	CFLAGS += -g -O3 | ||||
| 	CFLAGS += -g -O3 \ | ||||
| 		-Wno-unknown-warning-option \ | ||||
| 		-ffunction-sections \ | ||||
| 		-fdata-sections | ||||
| 	CXXFLAGS += \ | ||||
| 		-fext-numeric-literals \ | ||||
| 		-Wno-deprecated-enum-float-conversion \ | ||||
| 		-Wno-deprecated-enum-enum-conversion | ||||
| 	LDFLAGS += -static | ||||
| 	LDFLAGS += -static -Wl,--gc-sections | ||||
| 	AR = $(MINGW)ar | ||||
| 	PKG_CONFIG = $(MINGW)pkg-config -static | ||||
| 	WINDRES = $(MINGW)windres | ||||
| @@ -27,6 +30,8 @@ else | ||||
| 	CC = gcc | ||||
| 	CXX = g++ -std=c++20 | ||||
| 	CFLAGS = -g -O3 \ | ||||
| 		-Wno-unknown-warning-option | ||||
| 	CXXFLAGS += \ | ||||
| 		-Wno-deprecated-enum-float-conversion \ | ||||
| 		-Wno-deprecated-enum-enum-conversion | ||||
| 	LDFLAGS = | ||||
|   | ||||
							
								
								
									
build.py: 32 lines changed

							| @@ -8,8 +8,9 @@ import config | ||||
| import re | ||||
|  | ||||
| # Hack for building on Fedora/WSL; executables get the .exe extension, | ||||
| # build the build system detects it as Linux. | ||||
| # but the build system detects it as Linux. | ||||
| import build.toolchain | ||||
|  | ||||
| toolchain.Toolchain.EXE = "$(EXT)" | ||||
|  | ||||
| package(name="protobuf_lib", package="protobuf") | ||||
| @@ -27,7 +28,7 @@ else: | ||||
|         ("acorndfs", "", "--200"), | ||||
|         ("agat", "", ""), | ||||
|         ("amiga", "", ""), | ||||
|         ("apple2", "", "--140 40track_drive"), | ||||
|         ("apple2", "", "--140 --drivetype=40"), | ||||
|         ("atarist", "", "--360"), | ||||
|         ("atarist", "", "--370"), | ||||
|         ("atarist", "", "--400"), | ||||
| @@ -37,17 +38,17 @@ else: | ||||
|         ("atarist", "", "--800"), | ||||
|         ("atarist", "", "--820"), | ||||
|         ("bk", "", ""), | ||||
|         ("brother", "", "--120 40track_drive"), | ||||
|         ("brother", "", "--120 --drivetype=40"), | ||||
|         ("brother", "", "--240"), | ||||
|         ( | ||||
|             "commodore", | ||||
|             "scripts/commodore1541_test.textpb", | ||||
|             "--171 40track_drive", | ||||
|             "--171 --drivetype=40", | ||||
|         ), | ||||
|         ( | ||||
|             "commodore", | ||||
|             "scripts/commodore1541_test.textpb", | ||||
|             "--192 40track_drive", | ||||
|             "--192 --drivetype=40", | ||||
|         ), | ||||
|         ("commodore", "", "--800"), | ||||
|         ("commodore", "", "--1620"), | ||||
| @@ -59,17 +60,17 @@ else: | ||||
|         ("ibm", "", "--1232"), | ||||
|         ("ibm", "", "--1440"), | ||||
|         ("ibm", "", "--1680"), | ||||
|         ("ibm", "", "--180 40track_drive"), | ||||
|         ("ibm", "", "--160 40track_drive"), | ||||
|         ("ibm", "", "--320 40track_drive"), | ||||
|         ("ibm", "", "--360 40track_drive"), | ||||
|         ("ibm", "", "--180 --drivetype=40"), | ||||
|         ("ibm", "", "--160 --drivetype=40"), | ||||
|         ("ibm", "", "--320 --drivetype=40"), | ||||
|         ("ibm", "", "--360 --drivetype=40"), | ||||
|         ("ibm", "", "--720_96"), | ||||
|         ("ibm", "", "--720_135"), | ||||
|         ("mac", "scripts/mac400_test.textpb", "--400"), | ||||
|         ("mac", "scripts/mac800_test.textpb", "--800"), | ||||
|         ("n88basic", "", ""), | ||||
|         ("rx50", "", ""), | ||||
|         ("tartu", "", "--390 40track_drive"), | ||||
|         ("tartu", "", "--390 --drivetype=40"), | ||||
|         ("tartu", "", "--780"), | ||||
|         ("tids990", "", ""), | ||||
|         ("victor9k", "", "--612"), | ||||
| @@ -92,7 +93,7 @@ else: | ||||
|                     + c[1] | ||||
|                     + "' '" | ||||
|                     + c[2] | ||||
|                     + "' $(dir $[outs[0]]) > /dev/null" | ||||
|                     + "' $[dirname(filenameof(outs[0]))] > /dev/null" | ||||
|                 ], | ||||
|                 label="CORPUSTEST", | ||||
|             ) | ||||
| @@ -108,6 +109,13 @@ export( | ||||
|         "brother240tool$(EXT)": "tools+brother240tool", | ||||
|         "upgrade-flux-file$(EXT)": "tools+upgrade-flux-file", | ||||
|     } | ||||
|     | ({"FluxEngine.pkg": "src/gui+fluxengine_pkg"} if config.osx else {}), | ||||
|     | ( | ||||
|         { | ||||
|             "FluxEngine.pkg": "src/gui+fluxengine_pkg", | ||||
|             "FluxEngine.app.zip": "src/gui+fluxengine_app_zip", | ||||
|         } | ||||
|         if config.osx | ||||
|         else {} | ||||
|     ), | ||||
|     deps=["tests", "src/formats+docs", "scripts+mkdocindex"] + corpustests, | ||||
| ) | ||||
|   | ||||
							
								
								
									
build/ab.mk: 69 lines changed

							| @@ -15,16 +15,17 @@ HOSTCC ?= gcc | ||||
| HOSTCXX ?= g++ | ||||
| HOSTAR ?= ar | ||||
| HOSTCFLAGS ?= -g -Og | ||||
| HOSTCXXFLAGS ?= $(HOSTCFLAGS) | ||||
| HOSTLDFLAGS ?= -g | ||||
|  | ||||
| CC ?= $(HOSTCC) | ||||
| CXX ?= $(HOSTCXX) | ||||
| AR ?= $(HOSTAR) | ||||
| CFLAGS ?= $(HOSTCFLAGS) | ||||
| CXXFLAGS ?= $(CFLAGS) | ||||
| LDFLAGS ?= $(HOSTLDFLAGS) | ||||
|  | ||||
| export PKG_CONFIG | ||||
| export HOST_PKG_CONFIG | ||||
| NINJA ?= ninja | ||||
|  | ||||
| ifdef VERBOSE | ||||
| 	hide = | ||||
| @@ -63,37 +64,33 @@ EXT ?= | ||||
|  | ||||
| CWD=$(shell pwd) | ||||
|  | ||||
| ifeq ($(AB_ENABLE_PROGRESS_INFO),true) | ||||
| 	ifeq ($(PROGRESSINFO),) | ||||
| 	# The first make invocation here has to have its output discarded or else it | ||||
| 	# produces spurious 'Leaving directory' messages... don't know why. | ||||
| 	rulecount := $(strip $(shell $(MAKE) --no-print-directory -q $(OBJ)/build.mk PROGRESSINFO=1 > /dev/null \ | ||||
| 		&& $(MAKE) --no-print-directory -n $(MAKECMDGOALS) PROGRESSINFO=XXXPROGRESSINFOXXX | grep XXXPROGRESSINFOXXX | wc -l)) | ||||
| 	ruleindex := 1 | ||||
| 	PROGRESSINFO = "[$(ruleindex)/$(rulecount)]$(eval ruleindex := $(shell expr $(ruleindex) + 1)) " | ||||
| 	endif | ||||
| else | ||||
| 	PROGRESSINFO = "" | ||||
| endif | ||||
| define newline | ||||
|  | ||||
| PKG_CONFIG_HASHES = $(OBJ)/.pkg-config-hashes/target-$(word 1, $(shell $(PKG_CONFIG) --list-all | md5sum)) | ||||
| HOST_PKG_CONFIG_HASHES = $(OBJ)/.pkg-config-hashes/host-$(word 1, $(shell $(HOST_PKG_CONFIG) --list-all | md5sum)) | ||||
|  | ||||
| $(OBJ)/build.mk : $(PKG_CONFIG_HASHES) $(HOST_PKG_CONFIG_HASHES) | ||||
| $(PKG_CONFIG_HASHES) $(HOST_PKG_CONFIG_HASHES) &: | ||||
| 	$(hide) rm -rf $(OBJ)/.pkg-config-hashes | ||||
| 	$(hide) mkdir -p $(OBJ)/.pkg-config-hashes | ||||
| 	$(hide) touch $(PKG_CONFIG_HASHES) $(HOST_PKG_CONFIG_HASHES) | ||||
| endef | ||||
|  | ||||
| include $(OBJ)/build.mk | ||||
| define check_for_command | ||||
|   $(shell command -v $1 >/dev/null || (echo "Required command '$1' missing" >/dev/stderr && kill $$PPID)) | ||||
| endef | ||||
|  | ||||
| ifeq ($(OSX),yes) | ||||
| 	MAKEFLAGS += -r -j$(shell sysctl -n hw.logicalcpu) | ||||
| else | ||||
| 	MAKEFLAGS += -r -j$(shell nproc) | ||||
| endif | ||||
| $(call check_for_command,ninja) | ||||
| $(call check_for_command,cmp) | ||||
| $(call check_for_command,$(PYTHON)) | ||||
|  | ||||
| .DELETE_ON_ERROR: | ||||
| pkg-config-hash = $(shell ($(PKG_CONFIG) --list-all && $(HOST_PKG_CONFIG) --list-all) | md5sum) | ||||
| build-files = $(shell find . -name .obj -prune -o \( -name 'build.py' -a -type f \) -print) $(wildcard build/*.py) $(wildcard config.py) | ||||
| build-file-timestamps = $(shell ls -l $(build-files) | md5sum) | ||||
|  | ||||
| # Wipe the build file (forcing a regeneration) if the make environment is different. | ||||
| # (Conveniently, this includes the pkg-config hash calculated above.) | ||||
|  | ||||
| ignored-variables = MAKE_RESTARTS .VARIABLES MAKECMDGOALS MAKEFLAGS MFLAGS | ||||
| $(shell mkdir -p $(OBJ)) | ||||
| $(file >$(OBJ)/newvars.txt,$(foreach v,$(filter-out $(ignored-variables),$(.VARIABLES)),$(v)=$($(v))$(newline))) | ||||
| $(shell touch $(OBJ)/vars.txt) | ||||
| #$(shell diff -u $(OBJ)/vars.txt $(OBJ)/newvars.txt > /dev/stderr) | ||||
| $(shell cmp -s $(OBJ)/newvars.txt $(OBJ)/vars.txt || (rm -f $(OBJ)/build.ninja && echo "Environment changed --- regenerating" > /dev/stderr)) | ||||
| $(shell mv $(OBJ)/newvars.txt $(OBJ)/vars.txt) | ||||
|  | ||||
| .PHONY: update-ab | ||||
| update-ab: | ||||
| @@ -108,9 +105,15 @@ clean:: | ||||
| 	$(hide) rm -rf $(OBJ) | ||||
|  | ||||
| export PYTHONHASHSEED = 1 | ||||
| build-files = $(shell find . -name 'build.py') $(wildcard build/*.py) $(wildcard config.py) | ||||
| $(OBJ)/build.mk: Makefile $(build-files) build/ab.mk | ||||
| $(OBJ)/build.ninja $(OBJ)/build.targets &: | ||||
| 	@echo "AB" | ||||
| 	@mkdir -p $(OBJ) | ||||
| 	$(hide) $(PYTHON) -X pycache_prefix=$(OBJ)/__pycache__ build/ab.py -o $@ build.py \ | ||||
| 		|| rm -f $@ | ||||
| 	$(hide) $(PYTHON) -X pycache_prefix=$(OBJ)/__pycache__ build/ab.py \ | ||||
| 		-o $(OBJ) build.py \ | ||||
| 		-v $(OBJ)/vars.txt \ | ||||
| 		|| (rm -f $@ && false) | ||||
|  | ||||
| include $(OBJ)/build.targets | ||||
| .PHONY: $(ninja-targets) | ||||
| .NOTPARALLEL: | ||||
| $(ninja-targets): $(OBJ)/build.ninja | ||||
| 	+$(hide) $(NINJA) -f $(OBJ)/build.ninja $@ | ||||
|   | ||||
							
								
								
									
build/ab.ninja: 2 lines changed (new file)

							| @@ -0,0 +1,2 @@ | ||||
| rule rule | ||||
|     command = $command | ||||
							
								
								
									
build/ab.py: 261 lines changed

							| @@ -1,36 +1,32 @@ | ||||
| from collections import namedtuple | ||||
| from copy import copy | ||||
| from importlib.machinery import SourceFileLoader, PathFinder, ModuleSpec | ||||
| from os.path import * | ||||
| from pathlib import Path | ||||
| from typing import Iterable | ||||
| import argparse | ||||
| import ast | ||||
| import builtins | ||||
| from copy import copy | ||||
| import functools | ||||
| import hashlib | ||||
| import importlib | ||||
| import importlib.util | ||||
| from importlib.machinery import ( | ||||
|     SourceFileLoader, | ||||
|     PathFinder, | ||||
|     ModuleSpec, | ||||
| ) | ||||
| import inspect | ||||
| import os | ||||
| import re | ||||
| import string | ||||
| import sys | ||||
| import hashlib | ||||
| import re | ||||
| import ast | ||||
| from collections import namedtuple | ||||
| import types | ||||
|  | ||||
| VERBOSE_MK_FILE = False | ||||
| VERBOSE_NINJA_FILE = False | ||||
|  | ||||
| verbose = False | ||||
| quiet = False | ||||
| cwdStack = [""] | ||||
| targets = {} | ||||
| unmaterialisedTargets = {}  # dict, not set, to get consistent ordering | ||||
| materialisingStack = [] | ||||
| defaultGlobals = {} | ||||
| globalId = 1 | ||||
| wordCache = {} | ||||
| outputTargets = set() | ||||
|  | ||||
| RE_FORMAT_SPEC = re.compile( | ||||
|     r"(?:(?P<fill>[\s\S])?(?P<align>[<>=^]))?" | ||||
| @@ -52,6 +48,15 @@ sys.path += ["."] | ||||
| old_import = builtins.__import__ | ||||
|  | ||||
|  | ||||
| class Environment(types.SimpleNamespace): | ||||
|     def setdefault(self, name, value): | ||||
|         if not hasattr(self, name): | ||||
|             setattr(self, name, value) | ||||
|  | ||||
|  | ||||
| G = Environment() | ||||
|  | ||||
|  | ||||
| class PathFinderImpl(PathFinder): | ||||
|     def find_spec(self, fullname, path, target=None): | ||||
|         # The second test here is needed for Python 3.9. | ||||
| @@ -102,27 +107,88 @@ def error(message): | ||||
|     raise ABException(message) | ||||
|  | ||||
|  | ||||
| def _undo_escaped_dollar(s, op): | ||||
|     return s.replace(f"$${op}", f"${op}") | ||||
|  | ||||
|  | ||||
| class BracketedFormatter(string.Formatter): | ||||
|     def parse(self, format_string): | ||||
|         while format_string: | ||||
|             left, *right = format_string.split("$[", 1) | ||||
|             if not right: | ||||
|                 yield (left, None, None, None) | ||||
|             m = re.search(f"(?:[^$]|^)()\\$\\[()", format_string) | ||||
|             if not m: | ||||
|                 yield ( | ||||
|                     _undo_escaped_dollar(format_string, "["), | ||||
|                     None, | ||||
|                     None, | ||||
|                     None, | ||||
|                 ) | ||||
|                 break | ||||
|             right = right[0] | ||||
|             left = format_string[: m.start(1)] | ||||
|             right = format_string[m.end(2) :] | ||||
|  | ||||
|             offset = len(right) + 1 | ||||
|             try: | ||||
|                 ast.parse(right) | ||||
|             except SyntaxError as e: | ||||
|                 if not str(e).startswith("unmatched ']'"): | ||||
|                 if not str(e).startswith(f"unmatched ']'"): | ||||
|                     raise e | ||||
|                 offset = e.offset | ||||
|  | ||||
|             expr = right[0 : offset - 1] | ||||
|             format_string = right[offset:] | ||||
|  | ||||
|             yield (left if left else None, expr, None, None) | ||||
|             yield ( | ||||
|                 _undo_escaped_dollar(left, "[") if left else None, | ||||
|                 expr, | ||||
|                 None, | ||||
|                 None, | ||||
|             ) | ||||
|  | ||||
|  | ||||
| class GlobalFormatter(string.Formatter): | ||||
|     def parse(self, format_string): | ||||
|         while format_string: | ||||
|             m = re.search(f"(?:[^$]|^)()\\$\\(([^)]*)\\)()", format_string) | ||||
|             if not m: | ||||
|                 yield ( | ||||
|                     format_string, | ||||
|                     None, | ||||
|                     None, | ||||
|                     None, | ||||
|                 ) | ||||
|                 break | ||||
|             left = format_string[: m.start(1)] | ||||
|             var = m[2] | ||||
|             format_string = format_string[m.end(3) :] | ||||
|  | ||||
|             yield ( | ||||
|                 left if left else None, | ||||
|                 var, | ||||
|                 None, | ||||
|                 None, | ||||
|             ) | ||||
|  | ||||
|     def get_field(self, name, a1, a2): | ||||
|         return ( | ||||
|             getattr(G, name), | ||||
|             False, | ||||
|         ) | ||||
|  | ||||
|     def format_field(self, value, format_spec): | ||||
|         if not value: | ||||
|             return "" | ||||
|         return str(value) | ||||
|  | ||||
|  | ||||
| globalFormatter = GlobalFormatter() | ||||
|  | ||||
|  | ||||
| def substituteGlobalVariables(value): | ||||
|     while True: | ||||
|         oldValue = value | ||||
|         value = globalFormatter.format(value) | ||||
|         if value == oldValue: | ||||
|             return _undo_escaped_dollar(value, "(") | ||||
|  | ||||
|  | ||||
| def Rule(func): | ||||
| @@ -187,12 +253,10 @@ def _isiterable(xs): | ||||
|  | ||||
| class Target: | ||||
|     def __init__(self, cwd, name): | ||||
|         if verbose: | ||||
|             print("rule('%s', cwd='%s'" % (name, cwd)) | ||||
|         self.name = name | ||||
|         self.localname = self.name.rsplit("+")[-1] | ||||
|         self.traits = set() | ||||
|         self.dir = join("$(OBJ)", name) | ||||
|         self.dir = join(G.OBJ, name) | ||||
|         self.ins = [] | ||||
|         self.outs = [] | ||||
|         self.deps = [] | ||||
| @@ -232,7 +296,8 @@ class Target: | ||||
|                     [selfi.templateexpand(f) for f in filenamesof(value)] | ||||
|                 ) | ||||
|  | ||||
|         return Formatter().format(s) | ||||
|         s = Formatter().format(s) | ||||
|         return substituteGlobalVariables(s) | ||||
|  | ||||
|     def materialise(self, replacing=False): | ||||
|         if self not in unmaterialisedTargets: | ||||
| @@ -341,10 +406,10 @@ def targetof(value, cwd=None): | ||||
|             elif value.startswith("./"): | ||||
|                 value = normpath(join(cwd, value)) | ||||
|         # Explicit directories are always raw files. | ||||
|         elif value.endswith("/"): | ||||
|         if value.endswith("/"): | ||||
|             return _filetarget(value, cwd) | ||||
|         # Anything starting with a variable expansion is always a raw file. | ||||
|         elif value.startswith("$"): | ||||
|         # Anything in .obj is a raw file. | ||||
|         elif value.startswith(outputdir) or value.startswith(G.OBJ): | ||||
|             return _filetarget(value, cwd) | ||||
|  | ||||
|         # If this is not a rule lookup... | ||||
| @@ -467,78 +532,71 @@ def emit(*args, into=None): | ||||
|     if into is not None: | ||||
|         into += [s] | ||||
|     else: | ||||
|         outputFp.write(s) | ||||
|         ninjaFp.write(s) | ||||
|  | ||||
|  | ||||
| def shell(*args): | ||||
|     s = "".join(args) + "\n" | ||||
|     shellFp.write(s) | ||||
|  | ||||
|  | ||||
| def emit_rule(self, ins, outs, cmds=[], label=None): | ||||
|     name = self.name | ||||
|     fins_list = filenamesof(ins) | ||||
|     fins = set(fins_list) | ||||
|     fouts = filenamesof(outs) | ||||
|     nonobjs = [f for f in fouts if not f.startswith("$(OBJ)")] | ||||
|     fins = [self.templateexpand(f) for f in set(filenamesof(ins))] | ||||
|     fouts = [self.templateexpand(f) for f in filenamesof(outs)] | ||||
|  | ||||
|     global outputTargets | ||||
|     outputTargets.update(fouts) | ||||
|     outputTargets.add(name) | ||||
|  | ||||
|     emit("") | ||||
|     if VERBOSE_MK_FILE: | ||||
|     if VERBOSE_NINJA_FILE: | ||||
|         for k, v in self.args.items(): | ||||
|             emit(f"# {k} = {v}") | ||||
|  | ||||
|     lines = [] | ||||
|     if nonobjs: | ||||
|         emit("clean::", into=lines) | ||||
|         emit("\t$(hide) rm -f", *nonobjs, into=lines) | ||||
|  | ||||
|     hashable = cmds + fins_list + fouts | ||||
|     hash = hashlib.sha1(bytes("\n".join(hashable), "utf-8")).hexdigest() | ||||
|     hashfile = join(self.dir, f"hash_{hash}") | ||||
|  | ||||
|     global globalId | ||||
|     emit(".PHONY:", name, into=lines) | ||||
|     if outs: | ||||
|         outsn = globalId | ||||
|         globalId = globalId + 1 | ||||
|         insn = globalId | ||||
|         globalId = globalId + 1 | ||||
|  | ||||
|         emit(f"OUTS_{outsn}", "=", *fouts, into=lines) | ||||
|         emit(f"INS_{insn}", "=", *fins, into=lines) | ||||
|         emit(name, ":", f"$(OUTS_{outsn})", into=lines) | ||||
|         emit(hashfile, ":", into=lines) | ||||
|         emit(f"\t@mkdir -p {self.dir}", into=lines) | ||||
|         emit(f"\t@touch {hashfile}", into=lines) | ||||
|         emit( | ||||
|             f"$(OUTS_{outsn})", | ||||
|             "&:" if len(fouts) > 1 else ":", | ||||
|             f"$(INS_{insn})", | ||||
|             hashfile, | ||||
|             into=lines, | ||||
|         ) | ||||
|  | ||||
|         if label: | ||||
|             emit("\t$(hide)", "$(ECHO) $(PROGRESSINFO)" + label, into=lines) | ||||
|         os.makedirs(self.dir, exist_ok=True) | ||||
|         rule = [] | ||||
|  | ||||
|         sandbox = join(self.dir, "sandbox") | ||||
|         emit("\t$(hide)", f"rm -rf {sandbox}", into=lines) | ||||
|         emit(f"rm -rf {sandbox}", into=rule) | ||||
|         emit( | ||||
|             "\t$(hide)", | ||||
|             "$(PYTHON) build/_sandbox.py --link -s", | ||||
|             sandbox, | ||||
|             f"$(INS_{insn})", | ||||
|             into=lines, | ||||
|             f"{G.PYTHON} build/_sandbox.py --link -s", sandbox, *fins, into=rule | ||||
|         ) | ||||
|         for c in cmds: | ||||
|             emit(f"\t$(hide) cd {sandbox} && (", c, ")", into=lines) | ||||
|             emit(f"(cd {sandbox} &&", c, ")", into=rule) | ||||
|         emit( | ||||
|             "\t$(hide)", | ||||
|             "$(PYTHON) build/_sandbox.py --export -s", | ||||
|             f"{G.PYTHON} build/_sandbox.py --export -s", | ||||
|             sandbox, | ||||
|             f"$(OUTS_{outsn})", | ||||
|             into=lines, | ||||
|             *fouts, | ||||
|             into=rule, | ||||
|         ) | ||||
|  | ||||
|         ruletext = "".join(rule) | ||||
|         if len(ruletext) > 7000: | ||||
|             rulehash = hashlib.sha1(ruletext.encode()).hexdigest() | ||||
|  | ||||
|             rulef = join(self.dir, f"rule-{rulehash}.sh") | ||||
|             with open(rulef, "wt") as fp: | ||||
|                 fp.write("set -e\n") | ||||
|                 fp.write(ruletext) | ||||
|  | ||||
|             emit("build", *fouts, ":rule", *fins, rulef) | ||||
|             emit(" command=sh", rulef) | ||||
|         else: | ||||
|             emit("build", *fouts, ":rule", *fins) | ||||
|             emit( | ||||
|                 " command=", | ||||
|                 "&&".join([s.strip() for s in rule]).replace("$", "$$"), | ||||
|             ) | ||||
|         if label: | ||||
|             emit(" description=", label) | ||||
|         emit("build", name, ":phony", *fouts) | ||||
|  | ||||
|     else: | ||||
|         assert len(cmds) == 0, "rules with no outputs cannot have commands" | ||||
|         emit(name, ":", *fins, into=lines) | ||||
|         emit("build", name, ":phony", *fins) | ||||
|  | ||||
|     outputFp.write("".join(lines)) | ||||
|     emit("") | ||||
|  | ||||
|  | ||||
| @@ -585,47 +643,65 @@ def export(self, name=None, items: TargetsMap = {}, deps: Targets = []): | ||||
|         dest = self.targetof(dest) | ||||
|         outs += [dest] | ||||
|  | ||||
|         destf = filenameof(dest) | ||||
|         destf = self.templateexpand(filenameof(dest)) | ||||
|         outputTargets.update([destf]) | ||||
|  | ||||
|         srcs = filenamesof([src]) | ||||
|         assert ( | ||||
|             len(srcs) == 1 | ||||
|         ), "a dependency of an exported file must have exactly one output file" | ||||
|         srcf = self.templateexpand(srcs[0]) | ||||
|  | ||||
|         subrule = simplerule( | ||||
|             name=f"{self.localname}/{destf}", | ||||
|             cwd=self.cwd, | ||||
|             ins=[srcs[0]], | ||||
|             outs=[destf], | ||||
|             commands=["$(CP) -H %s %s" % (srcs[0], destf)], | ||||
|             label="", | ||||
|             commands=["$(CP) -H %s %s" % (srcf, destf)], | ||||
|             label="EXPORT", | ||||
|         ) | ||||
|         subrule.materialise() | ||||
|  | ||||
|     self.ins = [] | ||||
|     self.outs = deps + outs | ||||
|     outputTargets.add(name) | ||||
|  | ||||
|     emit("") | ||||
|     emit(".PHONY:", name) | ||||
|     emit(name, ":", *filenamesof(outs + deps)) | ||||
|     emit( | ||||
|         "build", | ||||
|         name, | ||||
|         ":phony", | ||||
|         *[self.templateexpand(f) for f in filenamesof(outs + deps)], | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     parser = argparse.ArgumentParser() | ||||
|     parser.add_argument("-v", "--verbose", action="store_true") | ||||
|     parser.add_argument("-q", "--quiet", action="store_true") | ||||
|     parser.add_argument("-o", "--output") | ||||
|     parser.add_argument("-v", "--varfile") | ||||
|     parser.add_argument("-o", "--outputdir") | ||||
|     parser.add_argument("-D", "--define", action="append", default=[]) | ||||
|     parser.add_argument("files", nargs="+") | ||||
|     args = parser.parse_args() | ||||
|  | ||||
|     global verbose | ||||
|     verbose = args.verbose | ||||
|  | ||||
|     global quiet | ||||
|     quiet = args.quiet | ||||
|  | ||||
|     global outputFp | ||||
|     outputFp = open(args.output, "wt") | ||||
|     vardefs = args.define | ||||
|     if args.varfile: | ||||
|         with open(args.varfile, "rt") as fp: | ||||
|             vardefs = vardefs + list(fp) | ||||
|  | ||||
|     for line in vardefs: | ||||
|         if "=" in line: | ||||
|             name, value = line.split("=", 1) | ||||
|             G.setdefault(name.strip(), value.strip()) | ||||
|  | ||||
|     global ninjaFp, shellFp, outputdir | ||||
|     outputdir = args.outputdir | ||||
|     G.setdefault("OBJ", outputdir) | ||||
|     ninjaFp = open(outputdir + "/build.ninja", "wt") | ||||
|     ninjaFp.write(f"include build/ab.ninja\n") | ||||
|  | ||||
|     for k in ["Rule"]: | ||||
|         defaultGlobals[k] = globals()[k] | ||||
| @@ -640,7 +716,10 @@ def main(): | ||||
|     while unmaterialisedTargets: | ||||
|         t = next(iter(unmaterialisedTargets)) | ||||
|         t.materialise() | ||||
|     emit("AB_LOADED = 1\n") | ||||
|  | ||||
|     with open(outputdir + "/build.targets", "wt") as fp: | ||||
|         fp.write("ninja-targets =") | ||||
|         fp.write(substituteGlobalVariables(" ".join(outputTargets))) | ||||
|  | ||||
|  | ||||
| main() | ||||
|   | ||||
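
The largest change in this range rewrites build/ab.py to emit a Ninja file instead of a Makefile fragment: global variables now live in a `G` environment object (an `Environment` subclass of `types.SimpleNamespace`), and a `GlobalFormatter` expands `$(NAME)` references in rule text when build statements are written out, with `$$(` kept as an escape for a literal `$(`. Below is a simplified, self-contained sketch of that substitution step; the variable values and the helper name are illustrative only, not taken from the commit.

```python
# Condensed illustration of the $(VAR) expansion introduced in build/ab.py.
# The real code uses a string.Formatter subclass (GlobalFormatter) plus
# substituteGlobalVariables(); this sketch collapses both into one regex
# loop with the same observable behaviour on simple inputs.
import re
import types


class Environment(types.SimpleNamespace):
    def setdefault(self, name, value):
        if not hasattr(self, name):
            setattr(self, name, value)


G = Environment()
G.setdefault("OBJ", ".obj")        # assumed value, normally read from vars.txt
G.setdefault("PYTHON", "python3")  # assumed value

_VAR = re.compile(r"(?<!\$)\$\(([^)]*)\)")


def substitute_globals(text: str) -> str:
    # Expand repeatedly so variables that expand to further $(...) references
    # are resolved too; "$$(" is preserved as a literal "$(" escape.
    while True:
        expanded = _VAR.sub(lambda m: str(getattr(G, m.group(1), "")), text)
        if expanded == text:
            return expanded.replace("$$(", "$(")
        text = expanded


print(substitute_globals("$(PYTHON) build/_sandbox.py --link -s $(OBJ)/sandbox"))
# python3 build/_sandbox.py --link -s .obj/sandbox
```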
							
								
								
									
build/c.py: 53 lines changed

							| @@ -7,23 +7,22 @@ from build.ab import ( | ||||
|     flatten, | ||||
|     simplerule, | ||||
|     emit, | ||||
|     G, | ||||
| ) | ||||
| from build.utils import filenamesmatchingof, stripext, collectattrs | ||||
| from build.utils import stripext, collectattrs | ||||
| from build.toolchain import Toolchain, HostToolchain | ||||
| from os.path import * | ||||
|  | ||||
| emit( | ||||
|     """ | ||||
| ifeq ($(OSX),no) | ||||
| STARTGROUP ?= -Wl,--start-group | ||||
| ENDGROUP ?= -Wl,--end-group | ||||
| endif | ||||
| """ | ||||
| ) | ||||
| if G.OSX != "yes": | ||||
|     G.STARTGROUP = "-Wl,--start-group" | ||||
|     G.ENDGROUP = "-Wl,--end-group" | ||||
| else: | ||||
|     G.STARTGROUP = "" | ||||
|     G.ENDGROUP = "" | ||||
|  | ||||
| Toolchain.CC = ["$(CC) -c -o $[outs[0]] $[ins[0]] $(CFLAGS) $[cflags]"] | ||||
| Toolchain.CPP = ["$(CC) -E -P -o $[outs] $[cflags] -x c $[ins]"] | ||||
| Toolchain.CXX = ["$(CXX) -c -o $[outs[0]] $[ins[0]] $(CFLAGS) $[cflags]"] | ||||
| Toolchain.CXX = ["$(CXX) -c -o $[outs[0]] $[ins[0]] $(CXXFLAGS) $[cflags]"] | ||||
| Toolchain.AR = ["$(AR) cqs $[outs[0]] $[ins]"] | ||||
| Toolchain.ARXX = ["$(AR) cqs $[outs[0]] $[ins]"] | ||||
| Toolchain.CLINK = [ | ||||
| @@ -70,13 +69,9 @@ def _toolchain_find_header_targets(deps, initial=[]): | ||||
| Toolchain.find_c_header_targets = _toolchain_find_header_targets | ||||
|  | ||||
|  | ||||
| HostToolchain.CC = [ | ||||
|     "$(HOSTCC) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]" | ||||
| ] | ||||
| HostToolchain.CC = ["$(HOSTCC) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]"] | ||||
| HostToolchain.CPP = ["$(HOSTCC) -E -P -o $[outs] $[cflags] -x c $[ins]"] | ||||
| HostToolchain.CXX = [ | ||||
|     "$(HOSTCXX) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]" | ||||
| ] | ||||
| HostToolchain.CXX = ["$(HOSTCXX) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]"] | ||||
| HostToolchain.AR = ["$(HOSTAR) cqs $[outs[0]] $[ins]"] | ||||
| HostToolchain.ARXX = ["$(HOSTAR) cqs $[outs[0]] $[ins]"] | ||||
| HostToolchain.CLINK = [ | ||||
| @@ -102,9 +97,7 @@ def _indirect(deps, name): | ||||
|     return r | ||||
|  | ||||
|  | ||||
| def cfileimpl( | ||||
|     self, name, srcs, deps, suffix, commands, label, toolchain, cflags | ||||
| ): | ||||
| def cfileimpl(self, name, srcs, deps, suffix, commands, label, toolchain, cflags): | ||||
|     outleaf = "=" + stripext(basename(filenameof(srcs[0]))) + suffix | ||||
|  | ||||
|     hdr_deps = toolchain.find_c_header_targets(deps) | ||||
| @@ -114,9 +107,7 @@ def cfileimpl( | ||||
|         if ("cheader_deps" not in d.args) and ("clibrary_deps" not in d.args) | ||||
|     ] | ||||
|     hdr_files = collectattrs(targets=hdr_deps, name="cheader_files") | ||||
|     cflags = collectattrs( | ||||
|         targets=hdr_deps, name="caller_cflags", initial=cflags | ||||
|     ) | ||||
|     cflags = collectattrs(targets=hdr_deps, name="caller_cflags", initial=cflags) | ||||
|  | ||||
|     t = simplerule( | ||||
|         replaces=self, | ||||
| @@ -194,7 +185,7 @@ def findsources(self, srcs, deps, cflags, filerule, toolchain, cwd): | ||||
|     for s in flatten(srcs): | ||||
|         objs += [ | ||||
|             filerule( | ||||
|                 name=join(self.localname, _removeprefix(f, "$(OBJ)/")), | ||||
|                 name=join(self.localname, _removeprefix(f, G.OBJ + "/")), | ||||
|                 srcs=[f], | ||||
|                 deps=deps, | ||||
|                 cflags=sorted(set(cflags)), | ||||
| @@ -239,9 +230,7 @@ def libraryimpl( | ||||
|         i = 0 | ||||
|         for dest, src in hdrs.items(): | ||||
|             s = filenamesof([src]) | ||||
|             assert ( | ||||
|                 len(s) == 1 | ||||
|             ), "the target of a header must return exactly one file" | ||||
|             assert len(s) == 1, "the target of a header must return exactly one file" | ||||
|  | ||||
|             cs += [f"$(CP) $[ins[{i}]] $[outs[{i}]]"] | ||||
|             outs += ["=" + dest] | ||||
| @@ -431,15 +420,11 @@ def programimpl( | ||||
|     label, | ||||
|     filerule, | ||||
| ): | ||||
|     cfiles = findsources( | ||||
|         self, srcs, deps, cflags, filerule, toolchain, self.cwd | ||||
|     ) | ||||
|     cfiles = findsources(self, srcs, deps, cflags, filerule, toolchain, self.cwd) | ||||
|  | ||||
|     lib_deps = toolchain.find_c_library_targets(deps) | ||||
|     libs = collectattrs(targets=lib_deps, name="clibrary_files") | ||||
|     ldflags = collectattrs( | ||||
|         targets=lib_deps, name="caller_ldflags", initial=ldflags | ||||
|     ) | ||||
|     ldflags = collectattrs(targets=lib_deps, name="caller_ldflags", initial=ldflags) | ||||
|  | ||||
|     simplerule( | ||||
|         replaces=self, | ||||
| @@ -558,9 +543,7 @@ def hostcxxprogram( | ||||
|  | ||||
| def _cppfileimpl(self, name, srcs, deps, cflags, toolchain): | ||||
|     hdr_deps = _indirect(deps, "cheader_deps") | ||||
|     cflags = collectattrs( | ||||
|         targets=hdr_deps, name="caller_cflags", initial=cflags | ||||
|     ) | ||||
|     cflags = collectattrs(targets=hdr_deps, name="caller_cflags", initial=cflags) | ||||
|  | ||||
|     simplerule( | ||||
|         replaces=self, | ||||
|   | ||||
							
								
								
									
build/pkg.py: 10 lines changed

							| @@ -1,4 +1,4 @@ | ||||
| from build.ab import Rule, Target | ||||
| from build.ab import Rule, Target, G | ||||
| import os | ||||
| import subprocess | ||||
|  | ||||
| @@ -31,8 +31,8 @@ class _PkgConfig: | ||||
|         return self.package_properties[p] | ||||
|  | ||||
|  | ||||
| TargetPkgConfig = _PkgConfig(os.getenv("PKG_CONFIG")) | ||||
| HostPkgConfig = _PkgConfig(os.getenv("HOST_PKG_CONFIG")) | ||||
| TargetPkgConfig = _PkgConfig(G.PKG_CONFIG) | ||||
| HostPkgConfig = _PkgConfig(G.HOST_PKG_CONFIG) | ||||
|  | ||||
|  | ||||
| def _package(self, name, package, fallback, pkgconfig): | ||||
| @@ -49,9 +49,7 @@ def _package(self, name, package, fallback, pkgconfig): | ||||
|         self.traits.update({"clibrary", "cxxlibrary"}) | ||||
|         return | ||||
|  | ||||
|     assert ( | ||||
|         fallback | ||||
|     ), f"Required package '{package}' not installed when materialising target '$[name]'" | ||||
|     assert fallback, f"Required package '{package}' not installed" | ||||
|  | ||||
|     if "cheader_deps" in fallback.args: | ||||
|         self.args["cheader_deps"] = fallback.args["cheader_deps"] | ||||
|   | ||||
| @@ -1,14 +1,10 @@ | ||||
| from build.ab import Rule, Targets, emit, simplerule, filenamesof | ||||
| from build.ab import Rule, Targets, emit, simplerule, filenamesof, G | ||||
| from build.utils import filenamesmatchingof, collectattrs | ||||
| from os.path import join, abspath, dirname, relpath | ||||
| from build.pkg import has_package | ||||
|  | ||||
| emit( | ||||
|     """ | ||||
| PROTOC ?= protoc | ||||
| HOSTPROTOC ?= protoc | ||||
| """ | ||||
| ) | ||||
| G.setdefault("PROTOC", "protoc") | ||||
| G.setdefault("HOSTPROTOC", "hostprotoc") | ||||
|  | ||||
| assert has_package("protobuf"), "required package 'protobuf' not installed" | ||||
|  | ||||
|   | ||||
| @@ -7,10 +7,13 @@ from build.ab import ( | ||||
|     cwdStack, | ||||
|     error, | ||||
|     simplerule, | ||||
|     G | ||||
| ) | ||||
| from os.path import relpath, splitext, join, basename, isfile | ||||
| from glob import iglob | ||||
| import fnmatch | ||||
| import subprocess | ||||
| import shutil | ||||
|  | ||||
|  | ||||
| def filenamesmatchingof(xs, pattern): | ||||
| @@ -51,6 +54,16 @@ def itemsof(pattern, root=None, cwd=None): | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def does_command_exist(cmd): | ||||
|     basecmd = cmd.strip().split()[0] | ||||
|     return shutil.which(basecmd) | ||||
|  | ||||
|  | ||||
| def shell(cmd): | ||||
|     r = subprocess.check_output([G.SHELL, "-c", cmd]) | ||||
|     return r.decode("utf-8").strip() | ||||
|  | ||||
|  | ||||
| @Rule | ||||
| def objectify(self, name, src: Target, symbol): | ||||
|     simplerule( | ||||
|   | ||||
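
The hunk above (its file header was lost in extraction) adds two small helpers: `does_command_exist()`, a thin wrapper around `shutil.which()`, and `shell()`, which runs a command line through `G.SHELL` and returns its trimmed output. A minimal usage sketch follows; the import path and the `/bin/sh` default are assumptions for illustration, since the hunk only shows the definitions.

```python
# Hedged usage sketch of the new helpers. Assumes they live in build/utils.py
# (not confirmed by this page) and that G.SHELL may not yet be defined.
from build.ab import G
from build.utils import does_command_exist, shell

G.setdefault("SHELL", "/bin/sh")  # shell() invokes commands via G.SHELL -c

if does_command_exist("ninja"):
    # shell() returns the command's stdout, decoded and stripped.
    print("ninja version:", shell("ninja --version"))
else:
    print("ninja is not on PATH")
```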
| @@ -7,9 +7,7 @@ from build.ab import ( | ||||
|  | ||||
|  | ||||
| @Rule | ||||
| def zip( | ||||
|     self, name, flags="", items: TargetsMap = {}, extension="zip", label="ZIP" | ||||
| ): | ||||
| def zip(self, name, flags="", items: TargetsMap = {}, extension="zip", label="ZIP"): | ||||
|     cs = ["$(PYTHON) build/_zip.py -z $[outs]"] | ||||
|  | ||||
|     ins = [] | ||||
|   | ||||
| @@ -35,7 +35,7 @@ clibrary( | ||||
|         "./config.h", | ||||
|         "./src/adflib.h", | ||||
|     ], | ||||
|     cflags=["-Idep/adflib", "-Idep/adflib/src"], | ||||
|     cflags=["-Wno-stringop-overflow"], | ||||
|     hdrs={ | ||||
|         "adf_blk.h": "./src/adf_blk.h", | ||||
|         "adf_defs.h": "./src/adf_defs.h", | ||||
|   | ||||
| @@ -204,18 +204,18 @@ install some support packages. | ||||
|   - For Linux with Ubuntu/Debian: | ||||
| 	`libusb-1.0-0-dev`, `libsqlite3-dev`, `zlib1g-dev`, | ||||
| 	`libudev-dev`, `protobuf-compiler`, `libwxgtk3.0-gtk3-dev`, | ||||
| 	`libfmt-dev`, `python3`. | ||||
| 	`libfmt-dev`, `python3`. `ninja-build` | ||||
|   - For Linux with Fedora/Red Hat: | ||||
|     `git`, `make`, `gcc`, `gcc-c++`, `xxd`, `protobuf-compiler`, | ||||
|     `protobuf-devel`, `fmt-devel`, `systemd-devel`, `wxGTK3-devel`, | ||||
|     `libsqlite3x-devel` | ||||
|     `libsqlite3x-devel`, `ninja-build` | ||||
|   - For OSX with Homebrew: `libusb`, `pkg-config`, `sqlite`, | ||||
|     `protobuf`, `truncate`, `wxwidgets`, `fmt`. | ||||
|     `protobuf`, `truncate`, `wxwidgets`, `fmt`. `ninja` | ||||
|   - For Windows with WSL: `protobuf-c-compiler` `protobuf-devel` `fmt-devel` | ||||
|   `systemd-devel` `sqlite-devel` `wxGTK-devel` `mingw32-gcc` `mingw32-gcc-c++` | ||||
|   `mingw32-zlib-static` `mingw32-protobuf-static` `mingw32-sqlite-static` | ||||
|   `mingw32-wxWidgets3-static` `mingw32-libpng-static` `mingw32-libjpeg-static` | ||||
|   `mingw32-libtiff-static` `mingw32-nsis png2ico` | ||||
|   `mingw32-libtiff-static` `mingw32-nsis png2ico` `ninja-build` | ||||
|  | ||||
| These lists are not necessarily exhaustive --- please [get in | ||||
| touch](https://github.com/davidgiven/fluxengine/issues/new) if I've missed | ||||
|   | ||||
| @@ -1,15 +0,0 @@ | ||||
| 40track_drive | ||||
| ==== | ||||
| ## Adjust configuration for a 40-track drive | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to read from 40-track, 48tpi 5.25" drives. You have to tell it because there is | ||||
| no way to detect this automatically. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read ibm --180 40track_drive | ||||
| ``` | ||||
|  | ||||
| @@ -31,9 +31,9 @@ they might require nudging as the side order can't be reliably autodetected. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read acornadfs --160 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --320 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --640 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --800 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --1600 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --160 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --320 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --640 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --800 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --1600 -s drive:0 -o acornadfs.img` | ||||
|  | ||||
|   | ||||
| @@ -24,13 +24,13 @@ requires a bit of fiddling as they have the same tracks on twice. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read acorndfs --100 -s drive:0 -o acorndfs.img` | ||||
|   - `fluxengine read acorndfs --200 -s drive:0 -o acorndfs.img` | ||||
|   - `fluxengine read -c acorndfs --100 -s drive:0 -o acorndfs.img` | ||||
|   - `fluxengine read -c acorndfs --200 -s drive:0 -o acorndfs.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write acorndfs --100 -d drive:0 -i acorndfs.img` | ||||
|   - `fluxengine write acorndfs --200 -d drive:0 -i acorndfs.img` | ||||
|   - `fluxengine write -c acorndfs --100 -d drive:0 -i acorndfs.img` | ||||
|   - `fluxengine write -c acorndfs --200 -d drive:0 -i acorndfs.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -37,7 +37,7 @@ based on what looks right. If anyone knows _anything_ about these disks, | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read aeslanier -s drive:0 -o aeslanier.img` | ||||
|   - `fluxengine read -c aeslanier -s drive:0 -o aeslanier.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -20,11 +20,11 @@ profile. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read agat -s drive:0 -o agat.img` | ||||
|   - `fluxengine read -c agat -s drive:0 -o agat.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write agat -d drive:0 -i agat.img` | ||||
|   - `fluxengine write -c agat -d drive:0 -i agat.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -26,11 +26,11 @@ distinctly subpar and not particularly good at detecting errors. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read amiga -s drive:0 -o amiga.adf` | ||||
|   - `fluxengine read -c amiga -s drive:0 -o amiga.adf` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write amiga -d drive:0 -i amiga.adf` | ||||
|   - `fluxengine write -c amiga -d drive:0 -i amiga.adf` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -43,8 +43,8 @@ kayinfo.lbr | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read ampro --400 -s drive:0 -o ampro.img` | ||||
|   - `fluxengine read ampro --800 -s drive:0 -o ampro.img` | ||||
|   - `fluxengine read -c ampro --400 -s drive:0 -o ampro.img` | ||||
|   - `fluxengine read -c ampro --800 -s drive:0 -o ampro.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -58,13 +58,13 @@ volume. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read apple2 --140 -s drive:0 -o apple2.img` | ||||
|   - `fluxengine read apple2 --640 -s drive:0 -o apple2.img` | ||||
|   - `fluxengine read -c apple2 --140 -s drive:0 -o apple2.img` | ||||
|   - `fluxengine read -c apple2 --640 -s drive:0 -o apple2.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write apple2 --140 -d drive:0 -i apple2.img` | ||||
|   - `fluxengine write apple2 --640 -d drive:0 -i apple2.img` | ||||
|   - `fluxengine write -c apple2 --140 -d drive:0 -i apple2.img` | ||||
|   - `fluxengine write -c apple2 --640 -d drive:0 -i apple2.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -1,16 +0,0 @@ | ||||
| apple2_drive | ||||
| ==== | ||||
| ## Adjust configuration for a 40-track Apple II drive | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to adjust the pinout and track spacing to work with an Apple II | ||||
| drive.  This only works on Greaseweazle hardware and requires a custom | ||||
| connector. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read apple2 --160 apple2_drive | ||||
| ``` | ||||
|  | ||||
| @@ -29,25 +29,25 @@ Be aware that many PC drives (including mine) won't do the 82 track formats. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read atarist --360 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --370 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --400 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --410 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --720 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --740 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --800 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --820 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --360 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --370 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --400 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --410 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --720 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --740 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --800 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --820 -s drive:0 -o atarist.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write atarist --360 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --370 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --400 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --410 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --720 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --740 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --800 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --820 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --360 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --370 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --400 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --410 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --720 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --740 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --800 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --820 -d drive:0 -i atarist.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -22,9 +22,9 @@ on what was available at the time, with the same format on both. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read bk -s drive:0 -o bk800.img` | ||||
|   - `fluxengine read -c bk -s drive:0 -o bk800.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write bk -d drive:0 -i bk800.img` | ||||
|   - `fluxengine write -c bk -d drive:0 -i bk800.img` | ||||
|  | ||||
|   | ||||
| @@ -44,13 +44,13 @@ investigate. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read brother --120 -s drive:0 -o brother.img` | ||||
|   - `fluxengine read brother --240 -s drive:0 -o brother.img` | ||||
|   - `fluxengine read -c brother --120 -s drive:0 -o brother.img` | ||||
|   - `fluxengine read -c brother --240 -s drive:0 -o brother.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write brother --120 -d drive:0 -i brother.img` | ||||
|   - `fluxengine write brother --240 -d drive:0 -i brother.img` | ||||
|   - `fluxengine write -c brother --120 -d drive:0 -i brother.img` | ||||
|   - `fluxengine write -c brother --240 -d drive:0 -i brother.img` | ||||
|  | ||||
| Dealing with misaligned disks | ||||
| ----------------------------- | ||||
|   | ||||
| @@ -54,18 +54,18 @@ A CMD FD2000 disk (a popular third-party Commodore disk drive) | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read commodore --171 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --192 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --800 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --1042 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --1620 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --171 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --192 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --800 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --1042 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --1620 -s drive:0 -o commodore.d64` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write commodore --171 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write commodore --192 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write commodore --800 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write commodore --1620 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --171 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --192 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --800 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --1620 -d drive:0 -i commodore.d64` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -33,7 +33,7 @@ images. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read eco1 -s drive:0 -o eco1.img` | ||||
|   - `fluxengine read -c eco1 -s drive:0 -o eco1.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -15,5 +15,5 @@ format itself is yet another IBM scheme variant. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read epsonpf10 -s drive:0 -o epsonpf10.img` | ||||
|   - `fluxengine read -c epsonpf10 -s drive:0 -o epsonpf10.img` | ||||
|  | ||||
|   | ||||
| @@ -36,7 +36,7 @@ touch](https://github.com/davidgiven/fluxengine/issues/new). | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read f85 -s drive:0 -o f85.img` | ||||
|   - `fluxengine read -c f85 -s drive:0 -o f85.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -30,7 +30,7 @@ I don't have access to one of those disks. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read fb100 -s drive:0 -o fb100.img` | ||||
|   - `fluxengine read -c fb100 -s drive:0 -o fb100.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -23,17 +23,17 @@ encoding scheme. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read hplif --264 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read hplif --608 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read hplif --616 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read hplif --770 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --264 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --608 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --616 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --770 -s drive:0 -o hplif.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write hplif --264 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write hplif --608 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write hplif --616 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write hplif --770 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --264 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --608 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --616 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --770 -d drive:0 -i hplif.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -55,30 +55,30 @@ image format. FluxEngine will use these parameters. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read ibm --auto -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --160 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --180 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --320 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --360 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --720_96 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --720_135 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1200 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1232 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1440 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1680 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --auto -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --160 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --180 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --320 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --360 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --720_96 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --720_135 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1200 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1232 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1440 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1680 -s drive:0 -o ibm.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write ibm --160 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --180 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --320 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --360 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --720_96 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --720_135 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1200 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1232 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1440 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1680 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --160 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --180 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --320 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --360 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --720_96 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --720_135 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1200 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1232 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1440 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1680 -d drive:0 -i ibm.img` | ||||
|  | ||||
| Mixed-format disks | ||||
| ------------------ | ||||
|   | ||||
| @@ -15,5 +15,5 @@ track! Other than that it's another IBM scheme variation. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read icl30 -s drive:0 -o icl30.img` | ||||
|   - `fluxengine read -c icl30 -s drive:0 -o icl30.img` | ||||
|  | ||||
|   | ||||
| @@ -47,13 +47,13 @@ standard for disk images is to omit it. If you want them, specify that you want | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read mac --400 -s drive:0 -o mac.dsk` | ||||
|   - `fluxengine read mac --800 -s drive:0 -o mac.dsk` | ||||
|   - `fluxengine read -c mac --400 -s drive:0 -o mac.dsk` | ||||
|   - `fluxengine read -c mac --800 -s drive:0 -o mac.dsk` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write mac --400 -d drive:0 -i mac.dsk` | ||||
|   - `fluxengine write mac --800 -d drive:0 -i mac.dsk` | ||||
|   - `fluxengine write -c mac --400 -d drive:0 -i mac.dsk` | ||||
|   - `fluxengine write -c mac --800 -d drive:0 -i mac.dsk` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -63,11 +63,11 @@ need to apply extra options to change the format if desired. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read micropolis -s drive:0 -o micropolis.img` | ||||
|   - `fluxengine read -c micropolis -s drive:0 -o micropolis.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write micropolis -d drive:0 -i micropolis.img` | ||||
|   - `fluxengine write -c micropolis -d drive:0 -i micropolis.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -52,10 +52,10 @@ Words are all stored little-endian. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read mx --110 -s drive:0 -o mx.img` | ||||
|   - `fluxengine read mx --220ds -s drive:0 -o mx.img` | ||||
|   - `fluxengine read mx --220ss -s drive:0 -o mx.img` | ||||
|   - `fluxengine read mx --440 -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --110 -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --220ds -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --220ss -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --440 -s drive:0 -o mx.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -18,9 +18,9 @@ boot ROM could only read single density data.) | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read n88basic -s drive:0 -o n88basic.img` | ||||
|   - `fluxengine read -c n88basic -s drive:0 -o n88basic.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write n88basic -d drive:0 -i n88basic.img` | ||||
|   - `fluxengine write -c n88basic -d drive:0 -i n88basic.img` | ||||
|  | ||||
|   | ||||
| @@ -31,15 +31,15 @@ equivalent to .img images. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read northstar --87 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read northstar --175 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read northstar --350 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read -c northstar --87 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read -c northstar --175 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read -c northstar --350 -s drive:0 -o northstar.nsi` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write northstar --87 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write northstar --175 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write northstar --350 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write -c northstar --87 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write -c northstar --175 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write -c northstar --350 -d drive:0 -i northstar.nsi` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -24,9 +24,9 @@ and, oddly, swapped sides. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read psos -s drive:0 -o pme.img` | ||||
|   - `fluxengine read -c psos -s drive:0 -o pme.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write psos -d drive:0 -i pme.img` | ||||
|   - `fluxengine write -c psos -d drive:0 -i pme.img` | ||||
|  | ||||
|   | ||||
| @@ -40,9 +40,9 @@ for assistance with this! | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read rolandd20 -s drive:0 -o rolandd20.img` | ||||
|   - `fluxengine read -c rolandd20 -s drive:0 -o rolandd20.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write rolandd20 -d drive:0 -i rolandd20.img` | ||||
|   - `fluxengine write -c rolandd20 -d drive:0 -i rolandd20.img` | ||||
|  | ||||
|   | ||||
| @@ -15,9 +15,9 @@ vanilla single-sided IBM scheme variation. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read rx50 -s drive:0 -o rx50.img` | ||||
|   - `fluxengine read -c rx50 -s drive:0 -o rx50.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write rx50 -d drive:0 -i rx50.img` | ||||
|   - `fluxengine write -c rx50 -d drive:0 -i rx50.img` | ||||
|  | ||||
|   | ||||
| @@ -1,15 +0,0 @@ | ||||
| shugart_drive | ||||
| ==== | ||||
| ## Adjust configuration for a Shugart drive | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to adjust the pinout to work with a Shugart drive. This only works | ||||
| on Greaseweazle hardware. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read ibm --720 shugart_drive | ||||
| ``` | ||||
|  | ||||
| @@ -26,7 +26,7 @@ this is completely correct, so don't trust it! | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read smaky6 -s drive:0 -o smaky6.img` | ||||
|   - `fluxengine read -c smaky6 -s drive:0 -o smaky6.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -34,13 +34,13 @@ FluxEngine supports reading and writing Tartu disks with CP/M filesystem access. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read tartu --390 -s drive:0 -o tartu.img` | ||||
|   - `fluxengine read tartu --780 -s drive:0 -o tartu.img` | ||||
|   - `fluxengine read -c tartu --390 -s drive:0 -o tartu.img` | ||||
|   - `fluxengine read -c tartu --780 -s drive:0 -o tartu.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write tartu --390 -d drive:0 -i tartu.img` | ||||
|   - `fluxengine write tartu --780 -d drive:0 -i tartu.img` | ||||
|   - `fluxengine write -c tartu --390 -d drive:0 -i tartu.img` | ||||
|   - `fluxengine write -c tartu --780 -d drive:0 -i tartu.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -26,11 +26,11 @@ FluxEngine will read and write these (but only the DSDD MFM variant). | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read tids990 -s drive:0 -o tids990.img` | ||||
|   - `fluxengine read -c tids990 -s drive:0 -o tids990.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write tids990 -d drive:0 -i tids990.img` | ||||
|   - `fluxengine write -c tids990 -d drive:0 -i tids990.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -20,8 +20,8 @@ on the precise format. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read tiki --90 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read tiki --200 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read tiki --400 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read tiki --800 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --90 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --200 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --400 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --800 -s drive:0 -o tiki.img` | ||||
|  | ||||
|   | ||||
| @@ -46,13 +46,13 @@ FluxEngine can read and write both the single-sided and double-sided variants. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read victor9k --612 -s drive:0 -o victor9k.img` | ||||
|   - `fluxengine read victor9k --1224 -s drive:0 -o victor9k.img` | ||||
|   - `fluxengine read -c victor9k --612 -s drive:0 -o victor9k.img` | ||||
|   - `fluxengine read -c victor9k --1224 -s drive:0 -o victor9k.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write victor9k --612 -d drive:0 -i victor9k.img` | ||||
|   - `fluxengine write victor9k --1224 -d drive:0 -i victor9k.img` | ||||
|   - `fluxengine write -c victor9k --612 -d drive:0 -i victor9k.img` | ||||
|   - `fluxengine write -c victor9k --1224 -d drive:0 -i victor9k.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -31,7 +31,7 @@ system. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read zilogmcz -s drive:0 -o zilogmcz.img` | ||||
|   - `fluxengine read -c zilogmcz -s drive:0 -o zilogmcz.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -36,10 +36,10 @@ Forty track formats on a forty track drive | ||||
| ------------------------------------------ | ||||
|  | ||||
| If you actually have a forty track drive, you need to tell FluxEngine. This is | ||||
| done by adding the special profile `40track_drive`: | ||||
| done by adding `--drivetype=40`: | ||||
|  | ||||
| ``` | ||||
| fluxengine write ibm --360 40track_drive -i image.img -d drive:0 | ||||
| fluxengine write -c ibm --360 --drivetype=40 -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| It should then Just Work. This is supported by both FluxEngine and Greaseweazle | ||||
| @@ -47,24 +47,6 @@ hardware. | ||||
|  | ||||
| Obviously you can't write an eighty-track format using a forty-track drive! | ||||
|  | ||||
| Apple II drives | ||||
| --------------- | ||||
|  | ||||
| The Apple II had special drives which supported microstepping: when commanded | ||||
| to move the head, then instead of moving in single-track steps as is done in | ||||
| most other drives, the Apple II drive would move in quarter-track steps. This | ||||
| allowed much less precise head alignment, as small errors could be corrected in | ||||
| software. (The Brother word processor drives were similar.) The bus interface | ||||
| is different from normal PC drives. | ||||
|  | ||||
| The FluxEngine client supports these with the `apple2_drive` profile: | ||||
|  | ||||
| ``` | ||||
| fluxengine write apple2 apple2_drive -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| This is supported only by Greaseweazle hardware. | ||||
|  | ||||
| Shugart drives | ||||
| -------------- | ||||
|  | ||||
| @@ -77,14 +59,32 @@ the drives must be jumpered to configure them. This was mostly used by older | ||||
| 3.5" drives, such as those on the Atari ST. See [the How It Works | ||||
| page](technical.md) for the pinout. | ||||
|  | ||||
| The FluxEngine client supports these with the `shugart_drive` profile: | ||||
| The FluxEngine client supports these with `--bus=shugart`: | ||||
|  | ||||
| ``` | ||||
| fluxengine write atarist720 shugart_drive -i image.img -d drive:0 | ||||
| fluxengine write -c atarist720 --bus=shugart -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| (If you have a 40-track Shugart drive, use _both_ `shugart_drive` and | ||||
| `40track_drive`.) | ||||
| (If you have a 40-track Shugart drive, use _both_ `--bus=shugart` and | ||||
| `--drivetype=40`.) | ||||
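|  | ||||
| For example, here is an illustrative combination of both options (assuming a 360kB IBM image, as in the 40-track example above): | ||||
|  | ||||
| ``` | ||||
| fluxengine write -c ibm --360 --bus=shugart --drivetype=40 -i image.img -d drive:0 | ||||
| ``` | ||||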
|  | ||||
| This is supported only by Greaseweazle hardware. | ||||
|  | ||||
| Apple II drives | ||||
| --------------- | ||||
|  | ||||
| The Apple II had special drives which supported microstepping: when commanded | ||||
| to move the head, instead of moving in single-track steps as is done in | ||||
| most other drives, the Apple II drive would move in quarter-track steps. This | ||||
| allowed much less precise head alignment, as small errors could be corrected in | ||||
| software. (The Brother word processor drives were similar.) The bus interface | ||||
| is different from normal PC drives. | ||||
|  | ||||
| The FluxEngine client supports these with `--drivetype=160 --bus=appleii`. | ||||
|  | ||||
| ``` | ||||
| fluxengine write -c apple2 --drivetype=160 --bus=appleii -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| This is supported only by Greaseweazle hardware. | ||||
|  | ||||
|   | ||||
48 doc/using.md
							| @@ -15,7 +15,7 @@ If possible, try using the GUI, which should provide simplified access for most | ||||
| common operations. | ||||
|  | ||||
| <div style="text-align: center"> | ||||
| <a href="doc/screenshot-details.png"><img src="doc/screenshot-details.png" style="width:60%" alt="screenshot of the GUI in action"></a> | ||||
| <a href="screenshot-details.png"><img src="screenshot-details.png" style="width:60%" alt="screenshot of the GUI in action"></a> | ||||
| </div> | ||||
|  | ||||
| ### Core concepts | ||||
| @@ -82,16 +82,16 @@ Here are some sample invocations: | ||||
| ``` | ||||
| # Read an PC 1440kB disk, producing a disk image with the default name | ||||
| # (ibm.img) | ||||
| $ fluxengine read ibm --1440 | ||||
| $ fluxengine read -c ibm --1440 | ||||
|  | ||||
| # Write a PC 1440kB disk to drive 1 | ||||
| $ fluxengine write ibm --1440 -i image.img -d drive:1 | ||||
| $ fluxengine write -c ibm --1440 -i image.img -d drive:1 | ||||
|  | ||||
| # Read a Eco1 CP/M disk, making a copy of the flux into a file | ||||
| $ fluxengine read eco1 --copy-flux-to copy.flux -o eco1.ldbs | ||||
| $ fluxengine read -c eco1 --copy-flux-to copy.flux -o eco1.ldbs | ||||
|  | ||||
| # Rerun the decode from the flux file, tweaking the parameters | ||||
| $ fluxengine read eco1 -s copy.flux -o eco1.ldbs --cylinders=1 | ||||
| $ fluxengine read -c eco1 -s copy.flux -o eco1.ldbs --cylinders=1 | ||||
| ``` | ||||
|  | ||||
| ### Configuration | ||||
| @@ -108,13 +108,13 @@ encoder { | ||||
|     } | ||||
|   } | ||||
| } | ||||
| $ fluxengine write ibm --1440 config.textpb -i image.img | ||||
| $ fluxengine write -c ibm --1440 -c config.textpb -i image.img | ||||
| ``` | ||||
|  | ||||
| ...or you can specify them on the command line: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine write ibm --1440 -i image.img --encoder.ibm.trackdata.emit_iam=false | ||||
| $ fluxengine write -c ibm --1440 -i image.img --encoder.ibm.trackdata.emit_iam=false | ||||
| ``` | ||||
|  | ||||
| Both the above invocations are equivalent. The text files use [Google's | ||||
| @@ -128,7 +128,7 @@ files as you wish; they are all merged left to right.  You can see all these | ||||
| settings by doing: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine write ibm --1440 --config | ||||
| $ fluxengine write -c ibm --1440 --show-config | ||||
| ``` | ||||
|  | ||||
| The `--show-config` option will cause the current configuration to be dumped to the | ||||
| @@ -146,40 +146,26 @@ different task. Run each one with `--help` to get a full list of | ||||
| (non-configuration-setting) options; this describes only basic usage of the | ||||
| more common tools. | ||||
|  | ||||
|   - `fluxengine read <profile> <options> -s <flux source> -o <image output>` | ||||
|   - `fluxengine read -c <profile> <options> -s <flux source> -o <image output>` | ||||
|  | ||||
|     Reads flux (possibly from a disk) and decodes it into a file system image. | ||||
|     `<profile>` is a reference to an internal input configuration file | ||||
|     describing the format. `<options>` may be any combination of options | ||||
|     defined by the profile. | ||||
|  | ||||
|   - `fluxengine write <profile> -i <image input> -d <flux destination>` | ||||
|   - `fluxengine write -c <profile> -i <image input> -d <flux destination>` | ||||
|  | ||||
|     Reads a filesystem image and encodes it into flux (possibly writing to a | ||||
|     disk). `<profile>` is a reference to an internal output configuration file | ||||
|     describing the format. | ||||
|  | ||||
|   - `fluxengine rawread -s <flux source> -d <flux destination>` | ||||
|  | ||||
|     Reads flux (possibly from a disk) and writes it to a flux file without doing | ||||
|     any decoding. You can specify a profile if you want to read a subset of the | ||||
|     disk. | ||||
|  | ||||
|   - `fluxengine rawwrite -s <flux source> -d <flux destination>` | ||||
|  | ||||
|     Reads flux from a file and writes it (possibly to a disk) without doing any | ||||
|     encoding. You can specify a profile if you want to write a subset of the | ||||
|     disk. | ||||
|  | ||||
|   - `fluxengine merge -s <fluxfile> -s <fluxfile...> -d <fluxfile` | ||||
|  | ||||
|     Merges data from multiple flux files together. This is useful if you have | ||||
|     several reads from an unreliable disk where each read has a different set | ||||
|     of good sectors. By merging the flux files, you get to combine all the | ||||
|     data. Don't use this on reads of different disks, for obvious results! Note | ||||
|     that this works on flux files, not on flux sources. | ||||
|  | ||||
|   - `fluxengine inspect -s <flux source> -c <cylinder> -h <head> -B` | ||||
|   - `fluxengine inspect -s <flux source> -t <track> -B` (see the example below) | ||||
|  | ||||
|     Reads flux (possibly from a disk) and does various analyses of it to try and | ||||
|     detect the clock rate, display raw flux information, examine the underlying | ||||
| @@ -198,14 +184,8 @@ more common tools. | ||||
|  | ||||
| There are other tools; try `fluxengine --help`. | ||||
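|  | ||||
| For instance, a minimal sketch of an `inspect` invocation against a previously saved flux file; the filename and the `c0h0`-style track specification are illustrative assumptions: | ||||
|  | ||||
| ``` | ||||
| fluxengine inspect -s copy.flux -t c0h0 -B | ||||
| ``` | ||||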
|  | ||||
| **Important note on `rawread` and `rawwrite`:** You can't use these tools to | ||||
| **Important note on `rawwrite`:** You can't use this tool to | ||||
| copy disks, in most circumstances. See [the FAQ](faq.md) for more information. | ||||
| Also, `rawread` is not guaranteed to read correctly. Floppy disks are | ||||
| fundamentally unreliable, and random bit errors may occur at any time; these | ||||
| can only be detected by performing a decode and verifying the checksums on the | ||||
| sectors. To perform a correct read, it's recommended to do `fluxengine read` | ||||
| with the `--copy-flux-to` option, to perform a decode to a filesystem image | ||||
| while also writing to a flux file. | ||||
|  | ||||
| ### Flux sources and destinations | ||||
|  | ||||
| @@ -489,7 +469,7 @@ containing valuable historical data, and you want to read them. | ||||
| Typically I do this: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine read brother240 -s drive:0 -o brother.img --copy-flux-to=brother.flux --decoder.write_csv_to=brother.csv | ||||
| $ fluxengine read -c brother240 -s drive:0 -o brother.img --copy-flux-to=brother.flux --decoder.write_csv_to=brother.csv | ||||
| ``` | ||||
|  | ||||
| This will read the disk in drive 0 and write out an information CSV file. It'll | ||||
| @@ -499,7 +479,7 @@ settings, I can rerun the decode without having to physically touch the disk | ||||
| like this: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine read brother -s brother.flux -o brother.img --decoder.write_csv_to=brother.csv | ||||
| $ fluxengine read -c brother -s brother.flux -o brother.img --decoder.write_csv_to=brother.csv | ||||
| ``` | ||||
|  | ||||
| Apart from being drastically faster, this avoids touching the (potentially | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from build.c import clibrary | ||||
| from build.zip import zip | ||||
| from glob import glob | ||||
| from os.path import * | ||||
| import config | ||||
|  | ||||
| icons = ["fluxfile", "hardware", "icon", "imagefile"] | ||||
|  | ||||
| @@ -17,37 +18,37 @@ clibrary( | ||||
|     }, | ||||
| ) | ||||
|  | ||||
| simplerule( | ||||
|     name="fluxengine_icns", | ||||
|     ins=["./icon.png"], | ||||
|     outs=["=fluxengine.icns"], | ||||
|     commands=[ | ||||
|         "mkdir -p fluxengine.iconset", | ||||
|         "sips -z 64 64 $[ins[0]] --out fluxengine.iconset/icon_32x32@2x.png > /dev/null", | ||||
|         "iconutil -c icns -o $[outs[0]] fluxengine.iconset", | ||||
|     ], | ||||
|     label="ICONSET", | ||||
| ) | ||||
|  | ||||
| simplerule( | ||||
|     name="fluxengine_ico", | ||||
|     ins=["./icon.png"], | ||||
|     outs=["=fluxengine.ico"], | ||||
|     commands=["png2ico $[outs[0]] $[ins[0]]"], | ||||
|     label="MAKEICON", | ||||
| ) | ||||
|  | ||||
| template_files = [ | ||||
|     f | ||||
|     for f in glob( | ||||
|         "**", recursive=True, root_dir="extras/FluxEngine.app.template" | ||||
| if config.osx: | ||||
|     simplerule( | ||||
|         name="fluxengine_icns", | ||||
|         ins=["./icon.png"], | ||||
|         outs=["=fluxengine.icns"], | ||||
|         commands=[ | ||||
|             "mkdir -p fluxengine.iconset", | ||||
|             "sips -z 64 64 $[ins[0]] --out fluxengine.iconset/icon_32x32@2x.png > /dev/null", | ||||
|             "iconutil -c icns -o $[outs[0]] fluxengine.iconset", | ||||
|         ], | ||||
|         label="ICONSET", | ||||
|     ) | ||||
|  | ||||
|     template_files = [ | ||||
|         f | ||||
|         for f in glob("**", recursive=True, root_dir="extras/FluxEngine.app.template") | ||||
|         if isfile(join("extras/FluxEngine.app.template", f)) | ||||
|     ] | ||||
|     zip( | ||||
|         name="fluxengine_template", | ||||
|         items={ | ||||
|             join("FluxEngine.app", k): join("extras/FluxEngine.app.template", k) | ||||
|             for k in template_files | ||||
|         }, | ||||
|     ) | ||||
|  | ||||
| if config.windows: | ||||
|     simplerule( | ||||
|         name="fluxengine_ico", | ||||
|         ins=["./icon.png"], | ||||
|         outs=["=fluxengine.ico"], | ||||
|         commands=["png2ico $[outs[0]] $[ins[0]]"], | ||||
|         label="MAKEICON", | ||||
|     ) | ||||
|     if isfile(join("extras/FluxEngine.app.template", f)) | ||||
| ] | ||||
| zip( | ||||
|     name="fluxengine_template", | ||||
|     items={ | ||||
|         join("FluxEngine.app", k): join("extras/FluxEngine.app.template", k) | ||||
|         for k in template_files | ||||
|     }, | ||||
| ) | ||||
|   | ||||
| @@ -741,38 +741,3 @@ void readDiskCommand( | ||||
|             *diskflux->image, globalConfig()->decoder().write_csv_to()); | ||||
|     writer.writeImage(*diskflux->image); | ||||
| } | ||||
|  | ||||
| void rawReadDiskCommand(FluxSource& fluxsource, FluxSink& fluxsink) | ||||
| { | ||||
|     log(BeginOperationLogMessage{"Performing raw read of disk"}); | ||||
|  | ||||
|     if (fluxsource.isHardware() || fluxsink.isHardware()) | ||||
|         measureDiskRotation(); | ||||
|     auto physicalLocations = Layout::computePhysicalLocations(); | ||||
|     unsigned index = 0; | ||||
|     for (const auto& physicalLocation : physicalLocations) | ||||
|     { | ||||
|         log(OperationProgressLogMessage{ | ||||
|             index * 100 / (int)physicalLocations.size()}); | ||||
|         index++; | ||||
|  | ||||
|         testForEmergencyStop(); | ||||
|         auto trackInfo = Layout::getLayoutOfTrackPhysical( | ||||
|             physicalLocation.cylinder, physicalLocation.head); | ||||
|         auto fluxSourceIterator = fluxsource.readFlux( | ||||
|             trackInfo->physicalTrack, trackInfo->physicalSide); | ||||
|  | ||||
|         log(BeginReadOperationLogMessage{ | ||||
|             trackInfo->physicalTrack, trackInfo->physicalSide}); | ||||
|         auto fluxmap = fluxSourceIterator->next(); | ||||
|         log(EndReadOperationLogMessage()); | ||||
|         log("{0} ms in {1} bytes", | ||||
|             (int)(fluxmap->duration() / 1e6), | ||||
|             fluxmap->bytes()); | ||||
|  | ||||
|         fluxsink.writeFlux( | ||||
|             trackInfo->physicalTrack, trackInfo->physicalSide, *fluxmap); | ||||
|     } | ||||
|  | ||||
|     log(EndOperationLogMessage{"Raw read complete"}); | ||||
| } | ||||
|   | ||||
| @@ -111,6 +111,5 @@ extern std::shared_ptr<const DiskFlux> readDiskCommand( | ||||
|     FluxSource& fluxsource, Decoder& decoder); | ||||
| extern void readDiskCommand( | ||||
|     FluxSource& source, Decoder& decoder, ImageWriter& writer); | ||||
| extern void rawReadDiskCommand(FluxSource& source, FluxSink& sink); | ||||
|  | ||||
| #endif | ||||
|   | ||||
| @@ -5,6 +5,7 @@ | ||||
| #include "lib/core/utils.h" | ||||
| #include <fstream> | ||||
| #include <google/protobuf/text_format.h> | ||||
| #include <fmt/ranges.h> | ||||
|  | ||||
| static Config config; | ||||
|  | ||||
| @@ -181,35 +182,8 @@ ConfigProto* Config::combined() | ||||
|     { | ||||
|         _combinedConfig = _baseConfig; | ||||
|  | ||||
|         /* First apply any standalone options. */ | ||||
|  | ||||
|         std::set<std::string> options = _appliedOptions; | ||||
|         for (const auto& option : _baseConfig.option()) | ||||
|         { | ||||
|             if (options.find(option.name()) != options.end()) | ||||
|             { | ||||
|                 _combinedConfig.MergeFrom(option.config()); | ||||
|                 options.erase(option.name()); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         /* Then apply any group options. */ | ||||
|  | ||||
|         for (auto& group : _baseConfig.option_group()) | ||||
|         { | ||||
|             const OptionProto* selectedOption = &*group.option().begin(); | ||||
|  | ||||
|             for (auto& option : group.option()) | ||||
|             { | ||||
|                 if (options.find(option.name()) != options.end()) | ||||
|                 { | ||||
|                     selectedOption = &option; | ||||
|                     options.erase(option.name()); | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             _combinedConfig.MergeFrom(selectedOption->config()); | ||||
|         } | ||||
|         for (const auto& optionInfo : _appliedOptions) | ||||
|             _combinedConfig.MergeFrom(optionInfo.option->config()); | ||||
|  | ||||
|         /* Add in the user overrides. */ | ||||
|  | ||||
| @@ -241,51 +215,27 @@ std::vector<std::string> Config::validate() | ||||
| { | ||||
|     std::vector<std::string> results; | ||||
|  | ||||
|     std::set<std::string> optionNames = _appliedOptions; | ||||
|     std::set<const OptionProto*> appliedOptions; | ||||
|     for (const auto& option : _baseConfig.option()) | ||||
|     { | ||||
|         if (optionNames.find(option.name()) != optionNames.end()) | ||||
|     /* Ensure that only one item in each group is set. */ | ||||
|  | ||||
|     std::map<const OptionGroupProto*, const OptionProto*> optionsByGroup; | ||||
|     for (auto [group, option, hasArgument] : _appliedOptions) | ||||
|         if (group) | ||||
|         { | ||||
|             appliedOptions.insert(&option); | ||||
|             optionNames.erase(option.name()); | ||||
|             auto& o = optionsByGroup[group]; | ||||
|             if (o) | ||||
|                 results.push_back( | ||||
|                     fmt::format("multiple mutually exclusive values set for " | ||||
|                                 "group '{}': valid values are: {}", | ||||
|                         group->comment(), | ||||
|                         fmt::join(std::views::transform( | ||||
|                                       group->option(), &OptionProto::name), | ||||
|                             ", "))); | ||||
|             o = option; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /* Then apply any group options. */ | ||||
|  | ||||
|     for (auto& group : _baseConfig.option_group()) | ||||
|     { | ||||
|         int count = 0; | ||||
|  | ||||
|         for (auto& option : group.option()) | ||||
|         { | ||||
|             if (optionNames.find(option.name()) != optionNames.end()) | ||||
|             { | ||||
|                 optionNames.erase(option.name()); | ||||
|                 appliedOptions.insert(&option); | ||||
|  | ||||
|                 count++; | ||||
|                 if (count == 2) | ||||
|                     results.push_back( | ||||
|                         fmt::format("multiple mutually exclusive options set " | ||||
|                                     "for group '{}'", | ||||
|                             group.comment())); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /* Check for unknown options. */ | ||||
|  | ||||
|     if (!optionNames.empty()) | ||||
|     { | ||||
|         for (auto& name : optionNames) | ||||
|             results.push_back(fmt::format("'{}' is not a known option", name)); | ||||
|     } | ||||
|  | ||||
|     /* Check option requirements. */ | ||||
|  | ||||
|     for (auto& option : appliedOptions) | ||||
|     for (auto [group, option, hasArgument] : _appliedOptions) | ||||
|     { | ||||
|         try | ||||
|         { | ||||
| @@ -360,11 +310,12 @@ void Config::readBaseConfig(std::string data) | ||||
|         error("couldn't load external config proto"); | ||||
| } | ||||
|  | ||||
| const OptionProto& Config::findOption(const std::string& optionName) | ||||
| Config::OptionInfo Config::findOption( | ||||
|     const std::string& name, const std::string value) | ||||
| { | ||||
|     const OptionProto* found = nullptr; | ||||
|  | ||||
|     auto searchOptionList = [&](auto& optionList) | ||||
|     auto searchOptionList = [&](auto& optionList, const std::string& optionName) | ||||
|     { | ||||
|         for (const auto& option : optionList) | ||||
|         { | ||||
| @@ -377,17 +328,39 @@ const OptionProto& Config::findOption(const std::string& optionName) | ||||
|         return false; | ||||
|     }; | ||||
|  | ||||
|     if (searchOptionList(base()->option())) | ||||
|         return *found; | ||||
|     /* First look for any group names which match. */ | ||||
|  | ||||
|     if (!value.empty()) | ||||
|         for (const auto& optionGroup : base()->option_group()) | ||||
|             if (optionGroup.name() == name) | ||||
|             { | ||||
|                 /* The option must therefore be one of these. */ | ||||
|  | ||||
|                 if (searchOptionList(optionGroup.option(), value)) | ||||
|                     return {&optionGroup, found, true}; | ||||
|  | ||||
|                 throw OptionNotFoundException(fmt::format( | ||||
|                     "value {} is not valid for option {}; valid values are: {}", | ||||
|                     value, | ||||
|                     name, | ||||
|                     fmt::join(std::views::transform( | ||||
|                                   optionGroup.option(), &OptionProto::name), | ||||
|                         ", "))); | ||||
|             } | ||||
|  | ||||
|     /* Now search for individual options. */ | ||||
|  | ||||
|     if (searchOptionList(base()->option(), name)) | ||||
|         return {nullptr, found, false}; | ||||
|  | ||||
|     for (const auto& optionGroup : base()->option_group()) | ||||
|     { | ||||
|         if (searchOptionList(optionGroup.option())) | ||||
|             return *found; | ||||
|         if (optionGroup.name().empty()) | ||||
|             if (searchOptionList(optionGroup.option(), name)) | ||||
|                 return {nullptr, found, false}; | ||||
|     } | ||||
|  | ||||
|     throw OptionNotFoundException( | ||||
|         fmt::format("option {} not found", optionName)); | ||||
|     throw OptionNotFoundException(fmt::format("option {} not found", name)); | ||||
| } | ||||
|  | ||||
| void Config::checkOptionValid(const OptionProto& option) | ||||
| @@ -445,22 +418,20 @@ bool Config::isOptionValid(const OptionProto& option) | ||||
|     } | ||||
| } | ||||
|  | ||||
| bool Config::isOptionValid(std::string option) | ||||
| { | ||||
|     return isOptionValid(findOption(option)); | ||||
| } | ||||
|  | ||||
| void Config::applyOption(const OptionProto& option) | ||||
| void Config::applyOption(const OptionInfo& optionInfo) | ||||
| { | ||||
|     auto* option = optionInfo.option; | ||||
|     log(OptionLogMessage{ | ||||
|         option.has_message() ? option.message() : option.comment()}); | ||||
|         option->has_message() ? option->message() : option->comment()}); | ||||
|  | ||||
|     _appliedOptions.insert(option.name()); | ||||
|     _appliedOptions.insert(optionInfo); | ||||
| } | ||||
|  | ||||
| void Config::applyOption(std::string option) | ||||
| bool Config::applyOption(const std::string& name, const std::string value) | ||||
| { | ||||
|     applyOption(findOption(option)); | ||||
|     auto optionInfo = findOption(name, value); | ||||
|     applyOption(optionInfo); | ||||
|     return optionInfo.usesValue; | ||||
| } | ||||
|  | ||||
| void Config::clearOptions() | ||||
|   | ||||
| @@ -66,6 +66,18 @@ struct FluxConstructor | ||||
|  | ||||
| class Config | ||||
| { | ||||
| private: | ||||
|     struct OptionInfo | ||||
|     { | ||||
|         bool operator==(const OptionInfo& other) const = default; | ||||
|         std::strong_ordering operator<=>( | ||||
|             const OptionInfo& other) const = default; | ||||
|  | ||||
|         const OptionGroupProto* group; | ||||
|         const OptionProto* option; | ||||
|         bool usesValue; | ||||
|     }; | ||||
|  | ||||
| public: | ||||
|     /* Direct access to the various proto layers. */ | ||||
|  | ||||
| @@ -124,12 +136,12 @@ public: | ||||
|     /* Option management: look up an option by name, determine whether an option | ||||
|      * is valid, and apply an option. */ | ||||
|  | ||||
|     const OptionProto& findOption(const std::string& option); | ||||
|     OptionInfo findOption( | ||||
|         const std::string& name, const std::string value = ""); | ||||
|     void checkOptionValid(const OptionProto& option); | ||||
|     bool isOptionValid(const OptionProto& option); | ||||
|     bool isOptionValid(std::string option); | ||||
|     void applyOption(const OptionProto& option); | ||||
|     void applyOption(std::string option); | ||||
|     void applyOption(const OptionInfo& optionInfo); | ||||
|     bool applyOption(const std::string& name, const std::string value = ""); | ||||
|     void clearOptions(); | ||||
|  | ||||
|     /* Adjust overall inputs and outputs. */ | ||||
| @@ -165,7 +177,7 @@ private: | ||||
|     ConfigProto _baseConfig; | ||||
|     ConfigProto _overridesConfig; | ||||
|     ConfigProto _combinedConfig; | ||||
|     std::set<std::string> _appliedOptions; | ||||
|     std::set<OptionInfo> _appliedOptions; | ||||
|     bool _configValid; | ||||
|  | ||||
|     FluxSourceProto _verificationFluxSourceProto; | ||||
|   | ||||
| @@ -73,5 +73,6 @@ message OptionProto | ||||
| message OptionGroupProto | ||||
| { | ||||
|     optional string comment = 1 [(help) = "help text for option group"]; | ||||
|     repeated OptionProto option = 2; | ||||
|     optional string name = 2 [(help) = "option group name"]; | ||||
|     repeated OptionProto option = 3; | ||||
| } | ||||
|   | ||||
| @@ -13,17 +13,23 @@ static std::vector<Flag*> all_flags; | ||||
| static std::map<const std::string, Flag*> flags_by_name; | ||||
|  | ||||
| static void doHelp(); | ||||
| static void doLoadConfig(const std::string& filename); | ||||
| static void doShowConfig(); | ||||
| static void doDoc(); | ||||
|  | ||||
| static FlagGroup helpGroup; | ||||
| static ActionFlag helpFlag = ActionFlag({"--help"}, "Shows the help.", doHelp); | ||||
|  | ||||
| static ActionFlag showConfigFlag = ActionFlag({"--config", "-C"}, | ||||
| static FlagGroup configGroup; | ||||
| static ActionFlag loadConfigFlag({"--config", "-c"}, | ||||
|     "Reads an internal or external configuration file.", | ||||
|     doLoadConfig); | ||||
|  | ||||
| static ActionFlag showConfigFlag({"--show-config", "-C"}, | ||||
|     "Shows the currently set configuration and halts.", | ||||
|     doShowConfig); | ||||
|  | ||||
| static ActionFlag docFlag = ActionFlag( | ||||
| static ActionFlag docFlag( | ||||
|     {"--doc"}, "Shows the available configuration options and halts.", doDoc); | ||||
|  | ||||
| FlagGroup::FlagGroup() | ||||
| @@ -152,7 +158,7 @@ std::vector<std::string> FlagGroup::parseFlagsWithFilenames(int argc, | ||||
|                         index += usesthat; | ||||
|                     } | ||||
|                     else | ||||
|                         globalConfig().applyOption(path); | ||||
|                         usesthat = globalConfig().applyOption(path, value); | ||||
|                 } | ||||
|                 else | ||||
|                     error("unrecognised flag '-{}'; try --help", key); | ||||
| @@ -182,17 +188,17 @@ void FlagGroup::parseFlags(int argc, | ||||
|             "non-option parameter '{}' seen (try --help)", *filenames.begin()); | ||||
| } | ||||
|  | ||||
| static void doLoadConfig(const std::string& filename) | ||||
| { | ||||
|     globalConfig().readBaseConfigFile(filename); | ||||
| } | ||||
|  | ||||
| void FlagGroup::parseFlagsWithConfigFiles(int argc, | ||||
|     const char* argv[], | ||||
|     const std::map<std::string, const ConfigProto*>& configFiles) | ||||
| { | ||||
|     parseFlags(argc, | ||||
|         argv, | ||||
|         [&](const auto& filename) | ||||
|         { | ||||
|             globalConfig().readBaseConfigFile(filename); | ||||
|             return true; | ||||
|         }); | ||||
|     globalConfig().readBaseConfigFile("_global_options"); | ||||
|     FlagGroup({this, &configGroup}).parseFlags(argc, argv); | ||||
| } | ||||
|  | ||||
| void FlagGroup::checkInitialised() const | ||||
|   | ||||
| @@ -83,13 +83,23 @@ public: | ||||
|         const std::string helptext, | ||||
|         std::function<void(void)> callback): | ||||
|         Flag(names, helptext), | ||||
|         _callback(callback) | ||||
|         _voidCallback(callback), | ||||
|         _hasArgument(false) | ||||
|     { | ||||
|     } | ||||
|  | ||||
|     ActionFlag(const std::vector<std::string>& names, | ||||
|         const std::string helptext, | ||||
|         std::function<void(const std::string&)> callback): | ||||
|         Flag(names, helptext), | ||||
|         _callback(callback), | ||||
|         _hasArgument(true) | ||||
|     { | ||||
|     } | ||||
|  | ||||
|     bool hasArgument() const override | ||||
|     { | ||||
|         return false; | ||||
|         return _hasArgument; | ||||
|     } | ||||
|  | ||||
|     const std::string defaultValueAsString() const override | ||||
| @@ -99,11 +109,16 @@ public: | ||||
|  | ||||
|     void set(const std::string& value) override | ||||
|     { | ||||
|         _callback(); | ||||
|         if (_hasArgument) | ||||
|             _callback(value); | ||||
|         else | ||||
|             _voidCallback(); | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     const std::function<void(void)> _callback; | ||||
|     const std::function<void(const std::string&)> _callback; | ||||
|     const std::function<void(void)> _voidCallback; | ||||
|     bool _hasArgument; | ||||
| }; | ||||
|  | ||||
| class SettableFlag : public Flag | ||||
|   | ||||
| @@ -439,7 +439,7 @@ std::string ProtoField::get() const | ||||
|             case google::protobuf::FieldDescriptor::TYPE_ENUM: | ||||
|             { | ||||
|                 const auto* enumvalue = reflection->GetEnum(*_message, _field); | ||||
|                 return enumvalue->name(); | ||||
|                 return (std::string)enumvalue->name(); | ||||
|             } | ||||
|  | ||||
|             case google::protobuf::FieldDescriptor::TYPE_MESSAGE: | ||||
| @@ -534,7 +534,7 @@ findAllPossibleProtoFields(const google::protobuf::Descriptor* descriptor) | ||||
|         for (int i = 0; i < d->field_count(); i++) | ||||
|         { | ||||
|             const google::protobuf::FieldDescriptor* f = d->field(i); | ||||
|             std::string n = s + f->name(); | ||||
|             std::string n = s + (std::string)f->name(); | ||||
|  | ||||
|             if (f->label() == google::protobuf::FieldDescriptor::LABEL_REPEATED) | ||||
|                 n += "[]"; | ||||
|   | ||||
| @@ -14,10 +14,10 @@ destfile=$dir/dest.img | ||||
|  | ||||
| dd if=/dev/urandom of=$srcfile bs=1048576 count=2 2>&1 | ||||
|  | ||||
| echo $fluxengine write $format -i $srcfile -d $fluxfile --drive.rotational_period_ms=200 $flags | ||||
| $fluxengine write $format -i $srcfile -d $fluxfile --drive.rotational_period_ms=200 $flags | ||||
| echo $fluxengine read $format -s $fluxfile -o $destfile --drive.rotational_period_ms=200 $flags | ||||
| $fluxengine read $format -s $fluxfile -o $destfile --drive.rotational_period_ms=200 $flags | ||||
| echo $fluxengine write -c $format -i $srcfile -d $fluxfile --drive.rotational_period_ms=200 $flags | ||||
| $fluxengine write -c $format -i $srcfile -d $fluxfile --drive.rotational_period_ms=200 $flags | ||||
| echo $fluxengine read -c $format -s $fluxfile -o $destfile --drive.rotational_period_ms=200 $flags | ||||
| $fluxengine read -c $format -s $fluxfile -o $destfile --drive.rotational_period_ms=200 $flags | ||||
| if [ ! -s $destfile ]; then | ||||
| 	echo "Zero length output file!" >&2 | ||||
| 	exit 1 | ||||
|   | ||||
| @@ -28,7 +28,7 @@ static void addExample(std::vector<std::string>& examples, | ||||
|     else | ||||
|         return; | ||||
|  | ||||
|     r += fmt::format(" {}", name); | ||||
|     r += fmt::format(" -c {}", name); | ||||
|     if (format) | ||||
|         r += fmt::format(" --{}", format->name()); | ||||
|  | ||||
|   | ||||
| @@ -19,7 +19,7 @@ static std::string supportStatus(SupportStatus status) | ||||
|             return ""; | ||||
|     } | ||||
|  | ||||
| 	return ""; | ||||
|     return ""; | ||||
| } | ||||
|  | ||||
| int main(int argc, const char* argv[]) | ||||
| @@ -43,7 +43,9 @@ int main(int argc, const char* argv[]) | ||||
|             { | ||||
|                 const auto* descriptor = | ||||
|                     FilesystemProto::FilesystemType_descriptor(); | ||||
|                 auto name = descriptor->FindValueByNumber(fs.type())->name(); | ||||
|                 auto name = | ||||
|                     (std::string)descriptor->FindValueByNumber(fs.type()) | ||||
|                         ->name(); | ||||
|  | ||||
|                 filesystems.insert(name); | ||||
|             } | ||||
|   | ||||
| @@ -4,11 +4,72 @@ | ||||
| #include <fstream> | ||||
| #include "fmt/format.h" | ||||
| #include "lib/core/globals.h" | ||||
| #include "lib/core/logger.h" | ||||
| #include "tests/testproto.pb.h" | ||||
| #include "lib/config/config.pb.h" | ||||
| #include <sstream> | ||||
| #include <locale> | ||||
|  | ||||
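| // No-op log-message renderers: presumably these exist so the test binary can link | ||||
| // against the logging interface without pulling in the real renderer implementations. | ||||
|  | ||||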
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const BeginSpeedOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const EndSpeedOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const BeginWriteOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const EndWriteOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const BeginReadOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const EndReadOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const TrackReadLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const DiskReadLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const BeginOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const EndOperationLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const OperationProgressLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const OptionLogMessage> m) | ||||
| { | ||||
| } | ||||
|  | ||||
| const std::string protoname = STRINGIFY(PROTO); | ||||
|  | ||||
| static uint32_t readu8(std::string::iterator& it, std::string::iterator end) | ||||
|   | ||||
| @@ -19,11 +19,9 @@ cxxprogram( | ||||
|         "./fe-getfileinfo.cc", | ||||
|         "./fe-inspect.cc", | ||||
|         "./fe-ls.cc", | ||||
|         "./fe-merge.cc", | ||||
|         "./fe-mkdir.cc", | ||||
|         "./fe-mv.cc", | ||||
|         "./fe-putfile.cc", | ||||
|         "./fe-rawread.cc", | ||||
|         "./fe-rawwrite.cc", | ||||
|         "./fe-read.cc", | ||||
|         "./fe-rm.cc", | ||||
|   | ||||
| @@ -22,9 +22,7 @@ static StringFlag destFlux({"--dest", "-d"}, | ||||
|         globalConfig().setFluxSink(value); | ||||
|     }); | ||||
|  | ||||
| static IntFlag destTrack({"--cylinder", "-c"}, "track to write to", 0); | ||||
|  | ||||
| static IntFlag destHead({"--head", "-h"}, "head to write to", 0); | ||||
| static StringFlag destTracks({"--tracks", "-t"}, "tracks to write to", "c0h0"); | ||||
|  | ||||
| static DoubleFlag minInterval( | ||||
|     {"--min-interval-us"}, "Minimum pulse interval", 2.0); | ||||
| @@ -251,11 +249,14 @@ int mainAnalyseDriveResponse(int argc, const char* argv[]) | ||||
|  | ||||
|     if (globalConfig()->flux_sink().type() != FLUXTYPE_DRIVE) | ||||
|         error("this only makes sense with a real disk drive"); | ||||
|     auto tracks = parseCylinderHeadsString(destTracks); | ||||
|     if (tracks.size() != 1) | ||||
|         error("you must specify exactly one track"); | ||||
|  | ||||
|     usbSetDrive(globalConfig()->drive().drive(), | ||||
|         globalConfig()->drive().high_density(), | ||||
|         globalConfig()->drive().index_mode()); | ||||
|     usbSeek(destTrack); | ||||
|     usbSeek(tracks[0].cylinder); | ||||
|  | ||||
|     std::cout << "Measuring rotational speed...\n"; | ||||
|     nanoseconds_t period = usbGetRotationalPeriod(0); | ||||
| @@ -291,12 +292,12 @@ int mainAnalyseDriveResponse(int argc, const char* argv[]) | ||||
|                 outFluxmap.appendPulse(); | ||||
|             } | ||||
|  | ||||
|             usbWrite(destHead, outFluxmap.rawBytes(), 0); | ||||
|             usbWrite(tracks[0].head, outFluxmap.rawBytes(), 0); | ||||
|  | ||||
|             /* Read the test pattern in again. */ | ||||
|  | ||||
|             Fluxmap inFluxmap; | ||||
|             inFluxmap.appendBytes(usbRead(destHead, true, period, 0)); | ||||
|             inFluxmap.appendBytes(usbRead(tracks[0].head, true, period, 0)); | ||||
|  | ||||
|             /* Compute histogram. */ | ||||
|  | ||||
|   | ||||
| @@ -21,9 +21,7 @@ static StringFlag sourceFlux({"--source", "-s"}, | ||||
|         globalConfig().setFluxSource(value); | ||||
|     }); | ||||
|  | ||||
| static IntFlag trackFlag({"--cylinder", "-c"}, "Track to read.", 0); | ||||
|  | ||||
| static IntFlag headFlag({"--head", "-h"}, "Head to read.", 0); | ||||
| static StringFlag destTracks({"--tracks", "-t"}, "track to read", "c0h0"); | ||||
|  | ||||
| static SettableFlag dumpFluxFlag( | ||||
|     {"--dump-flux", "-F"}, "Dump raw magnetic disk flux."); | ||||
| @@ -135,7 +133,10 @@ int mainInspect(int argc, const char* argv[]) | ||||
|     flags.parseFlagsWithConfigFiles(argc, argv, {}); | ||||
|  | ||||
|     auto fluxSource = FluxSource::create(globalConfig()); | ||||
|     const auto fluxmap = fluxSource->readFlux(trackFlag, headFlag)->next(); | ||||
|     auto tracks = parseCylinderHeadsString(destTracks); | ||||
|     if (tracks.size() != 1) | ||||
|         error("you must specify exactly one track"); | ||||
|     const auto fluxmap = fluxSource->readFlux(tracks[0])->next(); | ||||
|  | ||||
|     std::cout << fmt::format("0x{:x} bytes of data in {:.3f}ms\n", | ||||
|         fluxmap->bytes(), | ||||
|   | ||||
| @@ -1,64 +0,0 @@ | ||||
| #include "lib/core/globals.h" | ||||
| #include "lib/config/flags.h" | ||||
| #include "lib/data/fluxmap.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/config/proto.h" | ||||
| #include "lib/data/flux.h" | ||||
| #include "lib/external/fl2.h" | ||||
| #include "lib/external/fl2.pb.h" | ||||
| #include "src/fluxengine.h" | ||||
| #include <fstream> | ||||
|  | ||||
| static FlagGroup flags; | ||||
|  | ||||
| static std::vector<std::string> inputFluxFiles; | ||||
|  | ||||
| static StringFlag sourceFlux({"-s", "--source"}, | ||||
|     "flux file to read from (repeatable)", | ||||
|     "", | ||||
|     [](const auto& value) | ||||
|     { | ||||
|         inputFluxFiles.push_back(value); | ||||
|     }); | ||||
|  | ||||
| static StringFlag destFlux( | ||||
|     {"-d", "--dest"}, "destination flux file to write to", ""); | ||||
|  | ||||
| int mainMerge(int argc, const char* argv[]) | ||||
| { | ||||
|     flags.parseFlags(argc, argv); | ||||
|  | ||||
|     if (inputFluxFiles.empty()) | ||||
|         error("you must specify at least one input flux file (with -s)"); | ||||
|     if (destFlux.get() == "") | ||||
|         error("you must specify an output flux file (with -d)"); | ||||
|  | ||||
|     std::map<std::pair<int, int>, TrackFluxProto> data; | ||||
|     for (const auto& s : inputFluxFiles) | ||||
|     { | ||||
|         fmt::print("Reading {}...\n", s); | ||||
|         FluxFileProto f = loadFl2File(s); | ||||
|  | ||||
|         for (auto& trackflux : f.track()) | ||||
|         { | ||||
|             auto key = std::make_pair(trackflux.track(), trackflux.head()); | ||||
|             auto i = data.find(key); | ||||
|             if (i == data.end()) | ||||
|                 data[key] = trackflux; | ||||
|             else | ||||
|             { | ||||
|                 for (auto flux : trackflux.flux()) | ||||
|                     i->second.add_flux(flux); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     FluxFileProto proto; | ||||
|     for (auto& i : data) | ||||
|         *proto.add_track() = i.second; | ||||
|  | ||||
|     fmt::print("Writing {}...\n", destFlux.get()); | ||||
|     saveFl2File(destFlux.get(), proto); | ||||
|  | ||||
|     return 0; | ||||
| } | ||||
| @@ -1,52 +0,0 @@ | ||||
| #include "lib/core/globals.h" | ||||
| #include "lib/config/config.h" | ||||
| #include "lib/config/flags.h" | ||||
| #include "lib/algorithms/readerwriter.h" | ||||
| #include "lib/data/fluxmap.h" | ||||
| #include "lib/decoders/decoders.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/config/proto.h" | ||||
| #include "lib/fluxsink/fluxsink.h" | ||||
| #include "lib/fluxsource/fluxsource.h" | ||||
| #include "lib/imagewriter/imagewriter.h" | ||||
| #include "fluxengine.h" | ||||
| #include <google/protobuf/text_format.h> | ||||
| #include <fstream> | ||||
|  | ||||
| static FlagGroup flags; | ||||
|  | ||||
| static StringFlag sourceFlux({"-s", "--source"}, | ||||
|     "flux file to read from", | ||||
|     "", | ||||
|     [](const auto& value) | ||||
|     { | ||||
|         globalConfig().setFluxSource(value); | ||||
|     }); | ||||
|  | ||||
| static StringFlag destFlux({"-d", "--dest"}, | ||||
|     "destination flux file to write to", | ||||
|     "", | ||||
|     [](const auto& value) | ||||
|     { | ||||
|         globalConfig().setFluxSink(value); | ||||
|     }); | ||||
|  | ||||
| int mainRawRead(int argc, const char* argv[]) | ||||
| { | ||||
|     globalConfig().overrides()->set_tracks("0-79"); | ||||
|  | ||||
|     if (argc == 1) | ||||
|         showProfiles("rawread", formats); | ||||
|     globalConfig().overrides()->mutable_flux_source()->set_type(FLUXTYPE_DRIVE); | ||||
|     flags.parseFlagsWithConfigFiles(argc, argv, formats); | ||||
|  | ||||
|     if (globalConfig()->flux_sink().type() == FLUXTYPE_DRIVE) | ||||
|         error("you can't use rawread to write to hardware"); | ||||
|  | ||||
|     auto fluxSource = FluxSource::create(globalConfig()); | ||||
|     auto fluxSink = FluxSink::create(globalConfig()); | ||||
|  | ||||
|     rawReadDiskCommand(*fluxSource, *fluxSink); | ||||
|  | ||||
|     return 0; | ||||
| } | ||||
| @@ -16,7 +16,7 @@ static StringFlag sourceFlux({"-s", "--source"}, | ||||
|         globalConfig().setFluxSource(value); | ||||
|     }); | ||||
|  | ||||
| static IntFlag track({"--cylinder", "-c"}, "track to seek to", 0); | ||||
| static IntFlag track({"--cylinder", "-t"}, "track to seek to", 0); | ||||
|  | ||||
| extern const std::map<std::string, std::string> readables; | ||||
|  | ||||
|   | ||||
| @@ -16,11 +16,9 @@ extern command_cb mainGetFile; | ||||
| extern command_cb mainGetFileInfo; | ||||
| extern command_cb mainInspect; | ||||
| extern command_cb mainLs; | ||||
| extern command_cb mainMerge; | ||||
| extern command_cb mainMkDir; | ||||
| extern command_cb mainMv; | ||||
| extern command_cb mainPutFile; | ||||
| extern command_cb mainRawRead; | ||||
| extern command_cb mainRawWrite; | ||||
| extern command_cb mainRead; | ||||
| extern command_cb mainRm; | ||||
| @@ -51,10 +49,8 @@ static std::vector<Command> commands = | ||||
|     { "write",             mainWrite,             "Writes a sector image to a disk.", }, | ||||
| 	{ "fluxfile",          mainFluxfile,          "Flux file manipulation operations.", }, | ||||
| 	{ "format",            mainFormat,            "Format a disk and make a file system on it.", }, | ||||
| 	{ "rawread",           mainRawRead,           "Reads raw flux from a disk. Warning: you can't use this to copy disks.", }, | ||||
|     { "rawwrite",          mainRawWrite,          "Writes a flux file to a disk. Warning: you can't use this to copy disks.", }, | ||||
|     { "convert",           mainConvert,           "Converts a flux file from one format to another.", }, | ||||
| 	{ "merge",             mainMerge,             "Merge together multiple flux files.", }, | ||||
| 	{ "getdiskinfo",       mainGetDiskInfo,       "Read volume metadata off a disk (or image).", }, | ||||
| 	{ "ls",                mainLs,                "Show files on disk (or image).", }, | ||||
| 	{ "mv",                mainMv,                "Rename a file on a disk (or image).", }, | ||||
|   | ||||
| @@ -1,22 +0,0 @@ | ||||
| comment: 'Adjust configuration for a 40-track drive' | ||||
| is_extension: true | ||||
|  | ||||
| documentation: | ||||
| <<< | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to read from 40-track, 48tpi 5.25" drives. You have to tell it because there is | ||||
| no way to detect this automatically. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read ibm --180 40track_drive | ||||
| ``` | ||||
| >>> | ||||
|  | ||||
| drive { | ||||
|     tracks: "c0-40h0-1" | ||||
|     drive_type: DRIVETYPE_40TRACK | ||||
| } | ||||
|  | ||||
|  | ||||
77 src/formats/_global_options.textpb (new file)
							| @@ -0,0 +1,77 @@ | ||||
| comment: 'Options which can be applied everywhere.' | ||||
| is_extension: true | ||||
|  | ||||
| option_group { | ||||
| 	comment: "Drive type" | ||||
|     name: "drivetype" | ||||
|  | ||||
| 	option { | ||||
| 		name: "80" | ||||
| 		comment: '80 track drive' | ||||
|         set_by_default: true | ||||
|  | ||||
| 		config { | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	option { | ||||
| 		name: "40" | ||||
| 		comment: '40 track drive' | ||||
|  | ||||
| 		config { | ||||
|             drive { | ||||
|                 tracks: "c0-40h0-1" | ||||
|                 drive_type: DRIVETYPE_40TRACK | ||||
|             } | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	option { | ||||
| 		name: "160" | ||||
| 		comment: '160 track Apple II drive' | ||||
|  | ||||
| 		config { | ||||
|             drive { | ||||
|                 tracks: "c0-159h0" | ||||
|                 drive_type: DRIVETYPE_APPLE2 | ||||
|             } | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| option_group { | ||||
|     comment: 'Bus interface' | ||||
|     name: "bus" | ||||
|  | ||||
| 	option { | ||||
| 		name: "pc" | ||||
| 		comment: 'PC drive interface' | ||||
|         set_by_default: true | ||||
|     } | ||||
|  | ||||
| 	option { | ||||
| 		name: "shugart" | ||||
| 		comment: 'Shugart bus interface (only on Greaseweazle)' | ||||
|  | ||||
| 		config { | ||||
|             usb { | ||||
|                 greaseweazle { | ||||
|                     bus_type: SHUGART | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
| 	option { | ||||
| 		name: "appleii" | ||||
| 		comment: 'Apple II bus interface (only on Greaseweazle)' | ||||
|  | ||||
| 		config { | ||||
|             usb { | ||||
|                 greaseweazle { | ||||
|                     bus_type: APPLE2 | ||||
|                 } | ||||
|             } | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
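These option groups replace the removed `40track_drive`, `apple2_drive` and `shugart_drive` extension profiles with ordinary global options. Assuming they are selected by name on the command line in the same way as per-format options (an assumption; this file carries no documentation block of its own), the old profile invocations would map to something like:

```
fluxengine read ibm --180 --40          # was: fluxengine read ibm --180 40track_drive
fluxengine read ibm --720 --shugart     # was: fluxengine read ibm --720 shugart_drive
fluxengine read apple2 --160 --appleii  # was: fluxengine read apple2 --160 apple2_drive
```

Note that the old `apple2_drive` profile set both the bus type and the track layout; with the split option groups, the Apple II case presumably needs both the `appleii` bus option and the `160` drive type.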
| @@ -1,29 +0,0 @@ | ||||
| comment: 'Adjust configuration for a 40-track Apple II drive' | ||||
| is_extension: true | ||||
|  | ||||
| documentation: | ||||
| <<< | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to adjust the pinout and track spacing to work with an Apple II | ||||
| drive.  This only works on Greaseweazle hardware and requires a custom | ||||
| connector. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read apple2 --160 apple2_drive | ||||
| ``` | ||||
| >>> | ||||
|  | ||||
| usb { | ||||
|     greaseweazle { | ||||
|         bus_type: APPLE2 | ||||
|     } | ||||
| } | ||||
|  | ||||
| drive { | ||||
|     tracks: "c0-159h0" | ||||
|     drive_type: DRIVETYPE_APPLE2 | ||||
| } | ||||
|  | ||||
|  | ||||
| @@ -3,14 +3,13 @@ from build.c import cxxlibrary | ||||
| from scripts.build import protoencode | ||||
|  | ||||
| formats = [ | ||||
|     "40track_drive", | ||||
|     "_global_options", | ||||
|     "acornadfs", | ||||
|     "acorndfs", | ||||
|     "aeslanier", | ||||
|     "agat", | ||||
|     "amiga", | ||||
|     "ampro", | ||||
|     "apple2_drive", | ||||
|     "apple2", | ||||
|     "atarist", | ||||
|     "bk", | ||||
| @@ -33,7 +32,6 @@ formats = [ | ||||
|     "psos", | ||||
|     "rolandd20", | ||||
|     "rx50", | ||||
|     "shugart_drive", | ||||
|     "smaky6", | ||||
|     "tartu", | ||||
|     "ti99", | ||||
|   | ||||
| @@ -12,8 +12,10 @@ of a whole generation of Estonian IT professionals. | ||||
|  | ||||
| The system uses dual 5.25 inch ИЗОТ ЕС5323 (IZOT ES5323) | ||||
| diskette drives with regular MFM-encoded DSDD media. The disks have | ||||
| a sector skew factor 2 and tracks start from outside of the | ||||
| diskette _for both sides_, which is a combination that somewhat | ||||
| a sector skew factor of 2, and tracks are written on one side of | ||||
| the floppy until it is full, then continue on the other side, | ||||
| starting again from the outside of the disk. This differs | ||||
| from the more common alternating-sides method and somewhat | ||||
| complicates reading CP/M filesystem content with common tools. | ||||
|  | ||||
| Mostly 800kB (786kB) DSDD disks were used, but there are also | ||||
| @@ -21,8 +23,8 @@ Mostly 800kB (786kB) DSDD disks were used, but there are also | ||||
|  | ||||
| ## References (all in Estonian) | ||||
|  | ||||
|   - [How to read the content of Juku disks?](https://github.com/infoaed/juku3000/blob/master/docs/kettad.md) | ||||
|   - [List of recovered Juku software](https://github.com/infoaed/juku3000/blob/master/docs/tarkvara-kataloog.md) | ||||
|   - [How to read/write Juku disk images?](https://j3k.infoaed.ee/kettad/) | ||||
|   - [List of recovered Juku software](https://j3k.infoaed.ee/tarkvara-kataloog/) | ||||
|   - [System disks for E5104](https://elektroonikamuuseum.ee/juku_arvuti_tarkvara.html) | ||||
| >>> | ||||
|  | ||||
|   | ||||
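A minimal sketch of the track ordering described in the hunk above: side 0 is filled from the outermost cylinder inwards before any tracks are written to side 1. The geometry (80 cylinders per side) and the helper names are illustrative only, not taken from the FluxEngine source:

```python
CYLINDERS = 80  # assumed cylinder count for an 800kB DSDD Juku disk


def juku_track_to_physical(logical_track: int) -> tuple[int, int]:
    # Sequential sides: tracks 0..79 live on head 0, tracks 80..159 on head 1,
    # each side starting again from the outermost cylinder.
    head, cylinder = divmod(logical_track, CYLINDERS)
    return cylinder, head


def alternating_track_to_physical(logical_track: int) -> tuple[int, int]:
    # The more common layout, for comparison: sides alternate on every track.
    cylinder, head = divmod(logical_track, 2)
    return cylinder, head
```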
| @@ -1,22 +0,0 @@ | ||||
| comment: 'Adjust configuration for a Shugart drive' | ||||
| is_extension: true | ||||
|  | ||||
| documentation: | ||||
| <<< | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to adjust the pinout to work with a Shugart drive. This only works | ||||
| on Greaseweazle hardware. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read ibm --720 shugart_drive | ||||
| ``` | ||||
| >>> | ||||
|  | ||||
| usb { | ||||
|     greaseweazle { | ||||
|         bus_type: SHUGART | ||||
|     } | ||||
| } | ||||
|  | ||||
| @@ -1,19 +1,19 @@ | ||||
| from build.ab import emit, simplerule | ||||
| from build.ab import simplerule, G | ||||
| from build.c import cxxprogram | ||||
| from build.utils import shell, does_command_exist | ||||
| from glob import glob | ||||
| import config | ||||
|  | ||||
| emit( | ||||
|     """ | ||||
| WX_CONFIG ?= wx-config | ||||
| ifneq ($(strip $(shell command -v $(WX_CONFIG) >/dev/null 2>&1; echo $$?)),0) | ||||
| WX_CFLAGS = $(error Required binary 'wx-config' not found.) | ||||
| WX_LDFLAGS = $(error Required binary 'wx-config' not found.) | ||||
| else | ||||
| WX_CFLAGS := $(shell $(WX_CONFIG) --cxxflags base adv aui richtext core) | ||||
| WX_LDFLAGS := $(shell $(WX_CONFIG) --libs base adv aui richtext core) | ||||
| endif | ||||
| """ | ||||
| G.setdefault("WX_CONFIG", "wx-config") | ||||
| assert does_command_exist(G.WX_CONFIG), "Required binary 'wx-config' not found" | ||||
|  | ||||
| G.setdefault( | ||||
|     "WX_CFLAGS", | ||||
|     shell(f"{G.WX_CONFIG} --cxxflags base adv aui richtext core"), | ||||
| ) | ||||
| G.setdefault( | ||||
|     "WX_LDFLAGS", | ||||
|     shell(f"{G.WX_CONFIG} --libs base adv aui richtext core"), | ||||
| ) | ||||
|  | ||||
| extrasrcs = ["./layout.cpp"] | ||||
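The make-level `wx-config` probe is replaced by checks performed while the build script itself runs. The `shell` and `does_command_exist` helpers come from `build.utils`, which is not part of this diff; a rough sketch of the behaviour being assumed here, for orientation only:

```python
# Hypothetical stand-ins for build.utils.shell and build.utils.does_command_exist,
# shown only to illustrate what the new wx-config probe relies on.
import shutil
import subprocess


def does_command_exist(cmd: str) -> bool:
    # True if `cmd` resolves to an executable on PATH.
    return shutil.which(cmd) is not None


def shell(cmdline: str) -> str:
    # Run a command and return its stripped stdout, much like $(shell ...) in make.
    return subprocess.run(
        cmdline, shell=True, check=True, capture_output=True, text=True
    ).stdout.strip()
```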
| @@ -57,13 +57,13 @@ cxxprogram( | ||||
|  | ||||
| if config.osx: | ||||
|     simplerule( | ||||
|         name="fluxengine_pkg", | ||||
|         name="fluxengine_app_zip", | ||||
|         ins=[ | ||||
|             ".+gui", | ||||
|             "extras+fluxengine_icns", | ||||
|             "extras+fluxengine_template", | ||||
|         ], | ||||
|         outs=["=FluxEngine.pkg"], | ||||
|         outs=["=FluxEngine.app.zip"], | ||||
|         commands=[ | ||||
|             "rm -rf $[outs[0]]", | ||||
|             "unzip -q $[ins[2]]",  # creates FluxEngine.app | ||||
| @@ -79,7 +79,20 @@ if config.osx: | ||||
|             "cp $$(brew --prefix abseil)/LICENSE FluxEngine.app/Contents/libs/abseil.txt", | ||||
|             "cp $$(brew --prefix libtiff)/LICENSE.md FluxEngine.app/Contents/libs/libtiff.txt", | ||||
|             "cp $$(brew --prefix zstd)/LICENSE FluxEngine.app/Contents/libs/zstd.txt", | ||||
|             "pkgbuild --quiet --install-location /Applications --component FluxEngine.app $[outs[0]]", | ||||
|             "zip -rq $[outs[0]] FluxEngine.app", | ||||
|         ], | ||||
|         label="MKAPP", | ||||
|     ) | ||||
|  | ||||
|     simplerule( | ||||
|         name="fluxengine_pkg", | ||||
|         ins=[ | ||||
|             ".+fluxengine_app_zip", | ||||
|         ], | ||||
|         outs=["=FluxEngine.pkg"], | ||||
|         commands=[ | ||||
|             "unzip -q $[ins[0]]", | ||||
|             "pkgbuild --quiet --install-location /Applications --component FluxEngine.app $[outs[0]]", | ||||
|         ], | ||||
|         label="MKPKG", | ||||
|     ) | ||||
|   | ||||
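Packaging is now split into two stages: the GUI bundle is first zipped up as `FluxEngine.app.zip`, and the installer package is then built from that zip. The zip is presumably intended for drag-and-drop installs, while the `.pkg` installs into `/Applications`; for example:

```
unzip FluxEngine.app.zip && mv FluxEngine.app /Applications/   # from the zip artefact
sudo installer -pkg FluxEngine.pkg -target /                   # from the installer package
```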
| @@ -64,11 +64,14 @@ export( | ||||
|                     "+protobuf_lib", | ||||
|                     "+protocol", | ||||
|                     ".+test_proto_lib", | ||||
|                     "dep/alphanum", | ||||
|                     "dep/snowhouse", | ||||
|                     "lib/algorithms", | ||||
|                     "lib/config", | ||||
|                     "lib/core", | ||||
|                     "lib/data", | ||||
|                     "lib/fluxsource+proto_lib", | ||||
|                     "dep/alphanum", | ||||
|                     "src/formats", | ||||
|                 ], | ||||
|             ), | ||||
|         ) | ||||
|   | ||||
| @@ -55,14 +55,14 @@ static void test_option_validity() | ||||
|         } | ||||
| 	)M"); | ||||
|  | ||||
|     AssertThat( | ||||
|         globalConfig().isOptionValid(globalConfig().findOption("option1")), | ||||
|     AssertThat(globalConfig().isOptionValid( | ||||
|                    *globalConfig().findOption("option1").option), | ||||
|         Equals(true)); | ||||
|     AssertThat( | ||||
|         globalConfig().isOptionValid(globalConfig().findOption("option2")), | ||||
|     AssertThat(globalConfig().isOptionValid( | ||||
|                    *globalConfig().findOption("option2").option), | ||||
|         Equals(false)); | ||||
|     AssertThat( | ||||
|         globalConfig().isOptionValid(globalConfig().findOption("option3")), | ||||
|     AssertThat(globalConfig().isOptionValid( | ||||
|                    *globalConfig().findOption("option3").option), | ||||
|         Equals(true)); | ||||
| } | ||||
|  | ||||
|   | ||||