Mirror of https://github.com/davidgiven/fluxengine.git
Synced 2025-10-24 11:11:02 -07:00

Compare commits: protobuf ... dcae381973

261 Commits
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | dcae381973 | ||
|  | 2142bc7cce | ||
|  | ae3f82264a | ||
|  | 710e83c098 | ||
|  | 4f46fff3be | ||
|  | 58ea21a9a2 | ||
|  | 0fd1aa82a6 | ||
|  | 5b7f9d84f9 | ||
|  | 4b7e8e74a7 | ||
|  | 5375c72d02 | ||
|  | 5c257be164 | ||
|  | 7fa17322dc | ||
|  | ed3640d945 | ||
|  | 87ce3ad61d | ||
|  | 6d75feb0ce | ||
|  | 168b8b6f6c | ||
|  | 3d063e932a | ||
|  | 157ec569b2 | ||
|  | f63c8dadf1 | ||
|  | d17f6116f0 | ||
|  | 2d6cb22e3a | ||
|  | 2de8b52e56 | ||
|  | 171576e538 | ||
|  | 2db9f65e8b | ||
|  | 2572b64bd1 | ||
|  | 533aaf85f2 | ||
|  | f67ddc1f77 | ||
|  | b1d64f3683 | ||
|  | 7e8840e03f | ||
|  | b003297b22 | ||
|  | 7341cec2c4 | ||
|  | a98b7f72fd | ||
|  | 2e97579394 | ||
|  | f960c7efd0 | ||
|  | c2e7f32cba | ||
|  | 137528fc53 | ||
|  | cbf4cc35fb | ||
|  | cd7b3de1b3 | ||
|  | fddc2270e5 | ||
|  | 2a96d9bd78 | ||
|  | fd554f0808 | ||
|  | 6776c51b23 | ||
|  | ef58295304 | ||
|  | 2e2c3e3e34 | ||
|  | e87bb44a2d | ||
|  | 0ba0a9cce5 | ||
|  | 97bb563ba0 | ||
|  | 8f047f842e | ||
|  | 9d596ef530 | ||
|  | 580ffa8cf7 | ||
|  | 341e0a320d | ||
|  | cff0a9703c | ||
|  | 38618532c4 | ||
|  | 6026dcd86d | ||
|  | 3949971546 | ||
|  | 6146f442fb | ||
|  | 7090c1bfdf | ||
|  | 563babc969 | ||
|  | b649c2b9af | ||
|  | f7f887789c | ||
|  | a8fcdcc528 | ||
|  | a988578cc7 | ||
|  | ee585b24f0 | ||
|  | 3d6e980990 | ||
|  | f5d19416a9 | ||
|  | 4187fa5a09 | ||
|  | eb7613c03f | ||
|  | 7910429037 | ||
|  | cd1cc736a7 | ||
|  | e6d6805f25 | ||
|  | 9733879360 | ||
|  | 725712f796 | ||
|  | 2122cea5c4 | ||
|  | 5466e716a9 | ||
|  | 0dc0e3d9a1 | ||
|  | 4bb12b2caa | ||
|  | 0d9c5f5150 | ||
|  | 4030031a2c | ||
|  | 3143c87f1c | ||
|  | f16f02c4c7 | ||
|  | 3e13b2461d | ||
|  | 5fd0d1589e | ||
|  | 23e6d234d0 | ||
|  | cf2a97f8aa | ||
|  | 5a815e0cd6 | ||
|  | 06a3af2a1d | ||
|  | 0558d95fa3 | ||
|  | 81f9246ab8 | ||
|  | 6979567429 | ||
|  | 348de4165d | ||
|  | 0755d420dd | ||
|  | dead21bce5 | ||
|  | 4cf451ce60 | ||
|  | 72298ac805 | ||
|  | 3d1ad81652 | ||
|  | 88c79169b6 | ||
|  | d9747b9021 | ||
|  | 256976a5a1 | ||
|  | 0ba4b82e10 | ||
|  | ffd9e28b42 | ||
|  | 9c919c786d | ||
|  | 47a9a56959 | ||
|  | 6e03bc604a | ||
|  | feea6a027a | ||
|  | 08fa06b7fe | ||
|  | 8a976edef9 | ||
|  | c71d8d6c74 | ||
|  | e809af7426 | ||
|  | ab05db9040 | ||
|  | 04f916741e | ||
|  | f6224f3718 | ||
|  | 10185bb7a1 | ||
|  | d565960c70 | ||
|  | c21073294f | ||
|  | 3cd95de434 | ||
|  | 6552dba9aa | ||
|  | c8ebe55aa9 | ||
|  | 1eefa2d604 | ||
|  | a359394eea | ||
|  | 9f13026bec | ||
|  | 8fcc99b2a1 | ||
|  | 125a0536ff | ||
|  | 4115947d80 | ||
|  | 2f1dcd7c9a | ||
|  | 5e00ffca13 | ||
|  | ac27095493 | ||
|  | e27ca5cd4c | ||
|  | cc72ac6327 | ||
|  | 5443aa6501 | ||
|  | 902bf32169 | ||
|  | d200633747 | ||
|  | a48b749c2e | ||
|  | 46fab84b95 | ||
|  | b0290f858c | ||
|  | fe09c12cd6 | ||
|  | b5ae5a1cea | ||
|  | 113cb85512 | ||
|  | da276bcb3b | ||
|  | 9a78d0f38c | ||
|  | ec2e1666e7 | ||
|  | 478df40d4b | ||
|  | a8b9d79cb1 | ||
|  | 23865d1a10 | ||
|  | 458b3f24fe | ||
|  | 86fa23e6fa | ||
|  | dd9d5aaed5 | ||
|  | b22df17bb5 | ||
|  | b81e609e66 | ||
|  | d41e57cba6 | ||
|  | da7e83e257 | ||
|  | 83be12fcf1 | ||
|  | a999e2d6c9 | ||
|  | 6d6251e757 | ||
|  | be8b26ef94 | ||
|  | c6b8bce5d6 | ||
|  | d8b3452c07 | ||
|  | eddbd43cd9 | ||
|  | 168189180d | ||
|  | 9e092bab6a | ||
|  | 2c35126b3a | ||
|  | 7dc0e4ca31 | ||
|  | 96257f89d5 | ||
|  | 09919343b4 | ||
|  | b070c1068c | ||
|  | 5628a576db | ||
|  | 073c78e25f | ||
|  | 6a826d6eb5 | ||
|  | 11a6143d4c | ||
|  | 6127c9a46d | ||
|  | 98f7febef7 | ||
|  | 85afadacf0 | ||
|  | 01cd812162 | ||
|  | 39329acc77 | ||
|  | bdc96038ef | ||
|  | 93760d989a | ||
|  | b306c7063b | ||
|  | e3d7fa69d8 | ||
|  | f6c0e5405a | ||
|  | fc12a2662c | ||
|  | ab5b16488c | ||
|  | 4d5900268b | ||
|  | b5c5a4335d | ||
|  | e76235541a | ||
|  | e75e1a6e27 | ||
|  | aa220ecbcb | ||
|  | edc8d74418 | ||
|  | 2831aa09ae | ||
|  | e1b4b0d3a3 | ||
|  | e5df6ca33b | ||
|  | 68c3cbb020 | ||
|  | ca3c37d20a | ||
|  | 6fcd9233ea | ||
|  | 3761c4b1e2 | ||
|  | c89c53b1c7 | ||
|  | be0f63a133 | ||
|  | a8216995ad | ||
|  | 995359ef45 | ||
|  | bc84e3c8a0 | ||
|  | af12a25a9d | ||
|  | f6b2821221 | ||
|  | 458601a139 | ||
|  | a89130edbd | ||
|  | c95cd8a4da | ||
|  | 4d313a8495 | ||
|  | 263eef3442 | ||
|  | 2e97996211 | ||
|  | 7035b9c3c2 | ||
|  | 5628d2ca06 | ||
|  | 61cf7fbccf | ||
|  | ce347c6326 | ||
|  | 94119b19fe | ||
|  | 9c7be1268f | ||
|  | a9d59f67ba | ||
|  | 8d2a72228f | ||
|  | 60b95dd3f3 | ||
|  | b1094f40dc | ||
|  | e40ea80e34 | ||
|  | 9e1222d38a | ||
|  | 4446785729 | ||
|  | 790f015d72 | ||
|  | ccb0dcea3c | ||
|  | 15a0632af0 | ||
|  | 3c0da28947 | ||
|  | 95227f32ca | ||
|  | edf75b5cda | ||
|  | af87c48451 | ||
|  | 7cde8e3aa6 | ||
|  | 34fe6f0a5f | ||
|  | 76c9674f3f | ||
|  | addbabd123 | ||
|  | 46b90d9c36 | ||
|  | 7ee67082aa | ||
|  | e8042ed5f3 | ||
|  | 8828874c25 | ||
|  | 1bdb093319 | ||
|  | a1e2191ad5 | ||
|  | e61fcf1d9b | ||
|  | 610ef0dc4b | ||
|  | 273d38f237 | ||
|  | 8194a08382 | ||
|  | 6170b704b1 | ||
|  | b05f5e7caa | ||
|  | 4b38fc6044 | ||
|  | cee16a75ca | ||
|  | 9fd85a8289 | ||
|  | 2f1eff1474 | ||
|  | 8c582b8d72 | ||
|  | e49673329d | ||
|  | 07ebed83bf | ||
|  | 1def87fdc3 | ||
|  | d91fed7dd4 | ||
|  | 5f2f7e70ef | ||
|  | 83432beff6 | ||
|  | 979b550178 | ||
|  | 9062a531f3 | ||
|  | e2a6fbcf3c | ||
|  | ec16931f3a | ||
|  | 0ec0ca7495 | ||
|  | 51fa7c9371 | ||
|  | 6c69f10fe7 | ||
|  | 8d7dd4867b | 
							
								
								
									
.github/workflows/ccpp.yml (57 changed lines, vendored)

							| @@ -8,19 +8,21 @@ concurrency: | ||||
|  | ||||
| jobs: | ||||
|   build-linux: | ||||
|     runs-on: ubuntu-22.04 | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
|         repository: 'davidgiven/fluxengine' | ||||
|         path: 'fluxengine' | ||||
|         submodules: 'true' | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
|         repository: 'davidgiven/fluxengine-testdata' | ||||
|         path: 'fluxengine-testdata' | ||||
|     - name: apt | ||||
|       run: | | ||||
|         sudo apt install libudev-dev libsqlite3-dev protobuf-compiler libwxgtk3.0-gtk3-dev libfmt-dev libprotobuf-dev | ||||
|         sudo apt update | ||||
|         sudo apt install libudev-dev libsqlite3-dev protobuf-compiler libwxgtk3.2-dev libfmt-dev libprotobuf-dev libmagic-dev libmbedtls-dev libcurl4-openssl-dev libmagic-dev nlohmann-json3-dev libdbus-1-dev libglfw3-dev libmd4c-dev libfreetype-dev libcli11-dev libboost-regex-dev | ||||
|     - name: make | ||||
|       run: CXXFLAGS="-Wp,-D_GLIBCXX_ASSERTIONS" make -j`nproc` -C fluxengine | ||||
|  | ||||
| @@ -50,20 +52,22 @@ jobs: | ||||
|   build-macos-current: | ||||
|     strategy: | ||||
|       matrix: | ||||
|         runs-on: [macos-13, macos-latest] | ||||
|         runs-on: [macos-15, macos-15-intel] | ||||
|     runs-on: ${{ matrix.runs-on }} | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
|         repository: 'davidgiven/fluxengine' | ||||
|         path: 'fluxengine' | ||||
|         submodules: 'true' | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
|         repository: 'davidgiven/fluxengine-testdata' | ||||
|         path: 'fluxengine-testdata' | ||||
|     - name: brew | ||||
|       run: | | ||||
|         brew install sqlite pkg-config libusb protobuf wxwidgets fmt make coreutils dylibbundler libjpeg | ||||
|         brew install sqlite pkg-config libusb protobuf wxwidgets fmt make coreutils dylibbundler libjpeg libmagic nlohmann-json cli11 boost glfw3 md4c ninja python freetype2 mbedtls | ||||
|         brew upgrade | ||||
|     - name: make | ||||
|       run: gmake -C fluxengine | ||||
|     - name: Upload build artifacts | ||||
| @@ -76,29 +80,33 @@ jobs: | ||||
|  | ||||
|   build-windows: | ||||
|     runs-on: windows-latest | ||||
|     defaults: | ||||
|       run: | ||||
|         shell: msys2 {0} | ||||
|  | ||||
|     steps: | ||||
|     - name: setup WSL | ||||
|       run: | | ||||
|         curl -L https://github.com/WhitewaterFoundry/Fedora-Remix-for-WSL/releases/download/41.0.0/Fedora-Remix-for-WSL-SL_41.0.0.0_x64_arm64.msixbundle -o fedora.msixbundle | ||||
|         unzip fedora.msixbundle Fedora-Remix-for-WSL-SL_41.0.0.0_x64.msix | ||||
|         unzip Fedora-Remix-for-WSL-SL_41.0.0.0_x64.msix install.tar.gz | ||||
|         wsl --update | ||||
|         wsl --set-default-version 1 | ||||
|         wsl --import fedora fedora install.tar.gz | ||||
|         wsl --set-default fedora | ||||
|         wsl sh -c 'dnf -y install https://github.com/rpmsphere/noarch/raw/master/r/rpmsphere-release-40-1.noarch.rpm' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico' | ||||
|     - uses: msys2/setup-msys2@v2 | ||||
|       with: | ||||
|         msystem: mingw64 | ||||
|         update: true | ||||
|         install: | | ||||
|           python diffutils ninja make zip | ||||
|         pacboy: | | ||||
|           protobuf:p pkgconf:p curl-winssl:p file:p glfw:p mbedtls:p | ||||
|           sqlite:p freetype:p boost:p gcc:p binutils:p nsis:p abseil-cpp:p | ||||
|  | ||||
|     - name: fix line endings | ||||
|     - name: debug | ||||
|       run: | | ||||
|         git config --global core.autocrlf false | ||||
|         git config --global core.eol lf | ||||
|          | ||||
|         pacboy -Q --info protobuf:p | ||||
|         cat /mingw64/lib/pkgconfig/protobuf.pc | ||||
|         /mingw64/bin/pkg-config.exe protobuf --cflags | ||||
|         /mingw64/bin/pkg-config.exe protobuf --cflags --static | ||||
|  | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
|         repository: 'davidgiven/fluxengine' | ||||
|         path: 'fluxengine' | ||||
|         submodules: 'true' | ||||
|  | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
| @@ -107,17 +115,18 @@ jobs: | ||||
|  | ||||
|     - name: run | ||||
|       run: | | ||||
|         wsl sh -c 'make -C fluxengine BUILDTYPE=windows -j$(nproc)' | ||||
|         make -C fluxengine BUILDTYPE=windows | ||||
|  | ||||
|     - name: nsis | ||||
|       run: | | ||||
|         wsl sh -c 'cd fluxengine && strip fluxengine.exe -o fluxengine-stripped.exe' | ||||
|         wsl sh -c 'cd fluxengine && strip fluxengine-gui.exe -o fluxengine-gui-stripped.exe' | ||||
|         wsl sh -c 'cd fluxengine && makensis -v2 -nocd -dOUTFILE=fluxengine-installer.exe extras/windows-installer.nsi' | ||||
|         cd fluxengine | ||||
|         strip fluxengine.exe -o fluxengine-stripped.exe | ||||
|         strip fluxengine-gui.exe -o fluxengine-gui-stripped.exe | ||||
|         makensis -v2 -nocd -dOUTFILE=fluxengine-installer.exe extras/windows-installer.nsi | ||||
|  | ||||
|     - name: zip | ||||
|       run: | | ||||
|         wsl sh -c 'cd fluxengine && zip -9 fluxengine-windows.zip fluxengine.exe fluxengine-gui.exe upgrade-flux-file.exe brother120tool.exe brother240tool.exe FluxEngine.cydsn/CortexM3/ARM_GCC_541/Release/FluxEngine.hex fluxengine-installer.exe' | ||||
|         cd fluxengine && zip -9 fluxengine-windows.zip fluxengine.exe fluxengine-gui.exe upgrade-flux-file.exe brother120tool.exe brother240tool.exe FluxEngine.cydsn/CortexM3/ARM_GCC_541/Release/FluxEngine.hex fluxengine-installer.exe | ||||
|  | ||||
|     - name: Upload build artifacts | ||||
|       uses: actions/upload-artifact@v4 | ||||
|   | ||||
							
								
								
									
.github/workflows/release.yml (2 changed lines, vendored)

							| @@ -24,7 +24,7 @@ jobs: | ||||
|         wsl --import fedora fedora install.tar.gz | ||||
|         wsl --set-default fedora | ||||
|         wsl sh -c 'dnf -y install https://github.com/rpmsphere/noarch/raw/master/r/rpmsphere-release-40-1.noarch.rpm' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico' | ||||
|         wsl sh -c 'dnf -y install gcc gcc-c++ protobuf-c-compiler protobuf-devel fmt-devel systemd-devel sqlite-devel wxGTK-devel mingw32-gcc mingw32-gcc-c++ mingw32-zlib-static mingw32-protobuf-static mingw32-sqlite-static mingw32-wxWidgets3-static mingw32-libpng-static mingw32-libjpeg-static mingw32-libtiff-static mingw32-nsis png2ico ninja-build' | ||||
|  | ||||
|     - name: fix line endings | ||||
|       run: | | ||||
|   | ||||
							
								
								
									
.gitmodules (36 added lines, vendored, new file)

							| @@ -0,0 +1,36 @@ | ||||
| [submodule "dep/imhex"] | ||||
| 	path = dep/imhex | ||||
| 	url = git@github.com:davidgiven/ImHex.git | ||||
| [submodule "dep/libwolv"] | ||||
| 	path = dep/libwolv | ||||
| 	url = https://github.com/WerWolv/libwolv.git | ||||
| [submodule "dep/imgui"] | ||||
| 	path = dep/imgui | ||||
| 	url = https://github.com/ocornut/imgui.git | ||||
| [submodule "dep/pattern-language"] | ||||
| 	path = dep/pattern-language | ||||
| 	url = https://github.com/WerWolv/PatternLanguage.git | ||||
| [submodule "dep/native-file-dialog"] | ||||
| 	path = dep/native-file-dialog | ||||
| 	url = https://github.com/btzy/nativefiledialog-extended.git | ||||
| [submodule "dep/xdgpp"] | ||||
| 	path = dep/xdgpp | ||||
| 	url = https://github.com/WerWolv/xdgpp.git | ||||
| [submodule "dep/libromfs"] | ||||
| 	path = dep/libromfs | ||||
| 	url = https://github.com/WerWolv/libromfs.git | ||||
| [submodule "dep/throwing_ptr"] | ||||
| 	path = dep/throwing_ptr | ||||
| 	url = https://github.com/rockdreamer/throwing_ptr.git | ||||
| [submodule "dep/lunasvg"] | ||||
| 	path = dep/lunasvg | ||||
| 	url = https://github.com/sammycage/lunasvg.git | ||||
| [submodule "dep/md4c"] | ||||
| 	path = dep/md4c | ||||
| 	url = https://github.com/mity/md4c | ||||
| [submodule "dep/nlohmann_json"] | ||||
| 	path = dep/nlohmann_json | ||||
| 	url = https://github.com/nlohmann/json | ||||
| [submodule "dep/cli11"] | ||||
| 	path = dep/cli11 | ||||
| 	url = https://github.com/CLIUtils/CLI11 | ||||
							
								
								
									
Makefile (89 changed lines)

							| @@ -8,25 +8,52 @@ ifeq ($(BUILDTYPE),) | ||||
| endif | ||||
| export BUILDTYPE | ||||
|  | ||||
| OPTFLAGS = -g -O3 | ||||
|  | ||||
| ifeq ($(BUILDTYPE),windows) | ||||
| 	MINGW = i686-w64-mingw32- | ||||
| 	MINGW = x86_64-w64-mingw32- | ||||
| 	CC = $(MINGW)gcc | ||||
| 	CXX = $(MINGW)g++ -std=c++20 | ||||
| 	CFLAGS += -g -O3 | ||||
| 	CXX = $(MINGW)g++ | ||||
| 	CFLAGS += \ | ||||
| 		$(OPTFLAGS) \ | ||||
| 		-ffunction-sections \ | ||||
| 		-fdata-sections \ | ||||
| 		-Wno-attributes \ | ||||
| 		-Wa,-mbig-obj \ | ||||
| 		-static | ||||
| 	CXXFLAGS += \ | ||||
| 		-fext-numeric-literals \ | ||||
| 		$(OPTFLAGS) \ | ||||
| 		-std=c++23 \ | ||||
| 		-Wno-deprecated-enum-float-conversion \ | ||||
| 		-Wno-deprecated-enum-enum-conversion | ||||
| 	LDFLAGS += -static | ||||
| 	AR = $(MINGW)ar | ||||
| 	PKG_CONFIG = $(MINGW)pkg-config -static | ||||
| 		-Wno-deprecated-enum-enum-conversion \ | ||||
| 		-Wno-attributes \ | ||||
| 		-Wa,-mbig-obj \ | ||||
| 		-static | ||||
| 	LDFLAGS += -Wl,--gc-sections -static | ||||
| 	AR = $(MINGW)gcc-ar | ||||
| 	PKG_CONFIG = $(MINGW)pkg-config --static | ||||
| 	WINDRES = $(MINGW)windres | ||||
| 	WX_CONFIG = /usr/i686-w64-mingw32/sys-root/mingw/bin/wx-config-3.0 --static=yes | ||||
| 	NINJA = /bin/ninja | ||||
| 	PROTOC = /mingw64/bin/protoc | ||||
| 	PROTOC_SEPARATOR = ; | ||||
| 	EXT = .exe | ||||
|  | ||||
| 	AB_SANDBOX = no | ||||
| else | ||||
| 	CC = gcc | ||||
| 	CXX = g++ -std=c++20 | ||||
| 	CFLAGS = -g -O3 \ | ||||
| 	CC = clang | ||||
| 	CXX = clang++ | ||||
| 	CFLAGS = \ | ||||
| 		$(OPTFLAGS) \ | ||||
| 		-I/opt/homebrew/include -I/usr/local/include \ | ||||
| 		-Wno-unknown-warning-option | ||||
| 	CXXFLAGS = \ | ||||
| 		$(OPTFLAGS) \ | ||||
| 		-std=c++23 \ | ||||
| 		-fexperimental-library \ | ||||
| 		-I/opt/homebrew/include -I/usr/local/include \ | ||||
| 		-Wformat \ | ||||
| 		-Wformat-security \ | ||||
| 		-Wno-deprecated-enum-float-conversion \ | ||||
| 		-Wno-deprecated-enum-enum-conversion | ||||
| 	LDFLAGS = | ||||
| @@ -51,31 +78,33 @@ BINDIR ?= $(PREFIX)/bin | ||||
|  | ||||
| # Special Windows settings. | ||||
|  | ||||
| ifeq ($(OS), Windows_NT) | ||||
| 	EXT ?= .exe | ||||
| 	MINGWBIN = /mingw32/bin | ||||
| 	CCPREFIX = $(MINGWBIN)/ | ||||
| 	PKG_CONFIG = $(MINGWBIN)/pkg-config | ||||
| 	WX_CONFIG = /usr/bin/sh $(MINGWBIN)/wx-config --static=yes | ||||
| 	PROTOC = $(MINGWBIN)/protoc | ||||
| 	WINDRES = windres | ||||
| 	LDFLAGS += \ | ||||
| 		-static | ||||
| 	CXXFLAGS += \ | ||||
| 		-fext-numeric-literals \ | ||||
| 		-Wno-deprecated-enum-float-conversion \ | ||||
| 		-Wno-deprecated-enum-enum-conversion | ||||
|  | ||||
| 	# Required to get the gcc run - time libraries on the path. | ||||
| 	export PATH := $(PATH):$(MINGWBIN) | ||||
| endif | ||||
| #ifeq ($(OS), Windows_NT) | ||||
| #	EXT ?= .exe | ||||
| #	MINGWBIN = /mingw32/bin | ||||
| #	CCPREFIX = $(MINGWBIN)/ | ||||
| #	PKG_CONFIG = $(MINGWBIN)/pkg-config | ||||
| #	WX_CONFIG = /usr/bin/sh $(MINGWBIN)/wx-config --static=yes | ||||
| #	PROTOC = $(MINGWBIN)/protoc | ||||
| #	WINDRES = windres | ||||
| #	LDFLAGS += \ | ||||
| #		-static | ||||
| #	CXXFLAGS += \ | ||||
| #		-fext-numeric-literals \ | ||||
| #		-Wno-deprecated-enum-float-conversion \ | ||||
| #		-Wno-deprecated-enum-enum-conversion | ||||
| # | ||||
| #	# Required to get the gcc run - time libraries on the path. | ||||
| #	export PATH := $(PATH):$(MINGWBIN) | ||||
| #endif | ||||
|  | ||||
| # Special OSX settings. | ||||
|  | ||||
| ifeq ($(shell uname),Darwin) | ||||
| 	LDFLAGS += \ | ||||
| 		-framework IOKit \ | ||||
| 		-framework Foundation  | ||||
| 		-framework AppKit  \ | ||||
| 		-framework UniformTypeIdentifiers \ | ||||
| 		-framework UserNotifications | ||||
| endif | ||||
|  | ||||
| .PHONY: all | ||||
|   | ||||
| @@ -36,8 +36,8 @@ public: | ||||
|             decodeFmMfm(rawbits).slice(0, AESLANIER_RECORD_SIZE); | ||||
|         const auto& reversed = bytes.reverseBits(); | ||||
|  | ||||
|         _sector->logicalTrack = reversed[1]; | ||||
|         _sector->logicalSide = 0; | ||||
|         _sector->logicalCylinder = reversed[1]; | ||||
|         _sector->logicalHead = 0; | ||||
|         _sector->logicalSector = reversed[2]; | ||||
|  | ||||
|         /* Check header 'checksum' (which seems far too simple to mean much). */ | ||||
|   | ||||
| @@ -59,9 +59,9 @@ public: | ||||
|         if (bytes[3] != 0x5a) | ||||
|             return; | ||||
|  | ||||
|         _sector->logicalTrack = bytes[1] >> 1; | ||||
|         _sector->logicalCylinder = bytes[1] >> 1; | ||||
|         _sector->logicalSector = bytes[2]; | ||||
|         _sector->logicalSide = bytes[1] & 1; | ||||
|         _sector->logicalHead = bytes[1] & 1; | ||||
|         _sector->status = Sector::DATA_MISSING; /* unintuitive but correct */ | ||||
|     } | ||||
|  | ||||
|   | ||||
| @@ -58,13 +58,10 @@ private: | ||||
|     }; | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|         auto trackLayout = Layout::getLayoutOfTrack( | ||||
|             trackInfo->logicalTrack, trackInfo->logicalSide); | ||||
|  | ||||
|         double clockRateUs = _config.target_clock_period_us() / 2.0; | ||||
|         int bitsPerRevolution = | ||||
|             (_config.target_rotational_period_ms() * 1000.0) / clockRateUs; | ||||
| @@ -80,7 +77,7 @@ public: | ||||
|             writeFillerRawBytes(_config.pre_sector_gap_bytes(), 0xaaaa); | ||||
|             writeRawBits(SECTOR_ID, 64); | ||||
|             writeByte(0x5a); | ||||
|             writeByte((sector->logicalTrack << 1) | sector->logicalSide); | ||||
|             writeByte((sector->logicalCylinder << 1) | sector->logicalHead); | ||||
|             writeByte(sector->logicalSector); | ||||
|             writeByte(0x5a); | ||||
|  | ||||
|   | ||||
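
Note: several of the hunks above and below pack the cylinder and head into a single address byte as `(logicalCylinder << 1) | logicalHead`, and unpack it again with a shift and a mask. A standalone sketch of that round trip (the names here are illustrative, not FluxEngine's):

```cpp
#include <cassert>
#include <cstdint>

// Illustrative only: pack a cylinder/head pair into the single ID byte used
// by several of the formats above (cylinder in the high bits, head in bit 0).
static uint8_t packTrackByte(unsigned cylinder, unsigned head)
{
    return static_cast<uint8_t>((cylinder << 1) | (head & 1));
}

static void unpackTrackByte(uint8_t value, unsigned& cylinder, unsigned& head)
{
    cylinder = value >> 1; // matches `logicalCylinder = bytes[1] >> 1`
    head = value & 1;      // matches `logicalHead = bytes[1] & 1`
}

int main()
{
    unsigned cylinder, head;
    unpackTrackByte(packTrackByte(39, 1), cylinder, head);
    assert(cylinder == 39 && head == 1);
}
```
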
| @@ -52,8 +52,8 @@ public: | ||||
|         Bytes header = amigaDeinterleave(ptr, 4); | ||||
|         Bytes recoveryinfo = amigaDeinterleave(ptr, 16); | ||||
|  | ||||
|         _sector->logicalTrack = header[1] >> 1; | ||||
|         _sector->logicalSide = header[1] & 1; | ||||
|         _sector->logicalCylinder = header[1] >> 1; | ||||
|         _sector->logicalHead = header[1] & 1; | ||||
|         _sector->logicalSector = header[2]; | ||||
|  | ||||
|         uint32_t wantedheaderchecksum = | ||||
|   | ||||
| @@ -84,7 +84,7 @@ static void write_sector(std::vector<bool>& bits, | ||||
|  | ||||
|     checksum = 0; | ||||
|     Bytes header = {0xff, /* Amiga 1.0 format byte */ | ||||
|         (uint8_t)((sector->logicalTrack << 1) | sector->logicalSide), | ||||
|         (uint8_t)((sector->logicalCylinder << 1) | sector->logicalHead), | ||||
|         (uint8_t)sector->logicalSector, | ||||
|         (uint8_t)(AMIGA_SECTORS_PER_TRACK - sector->logicalSector)}; | ||||
|     write_interleaved_bytes(header); | ||||
| @@ -110,7 +110,7 @@ public: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|   | ||||
| @@ -5,6 +5,7 @@ | ||||
| #include "protocol.h" | ||||
| #include "lib/decoders/decoders.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/data/layout.h" | ||||
| #include "arch/apple2/apple2.h" | ||||
| #include "arch/apple2/apple2.pb.h" | ||||
| #include "lib/decoders/decoders.pb.h" | ||||
| @@ -93,24 +94,25 @@ public: | ||||
|         ByteReader br(header); | ||||
|  | ||||
|         uint8_t volume = combine(br.read_be16()); | ||||
|         _sector->logicalTrack = combine(br.read_be16()); | ||||
|         _sector->logicalSide = _sector->physicalSide; | ||||
|         _sector->logicalCylinder = combine(br.read_be16()); | ||||
|         _sector->logicalHead = _ltl->logicalHead; | ||||
|         _sector->logicalSector = combine(br.read_be16()); | ||||
|         uint8_t checksum = combine(br.read_be16()); | ||||
|  | ||||
|         // If the checksum is correct, upgrade the sector from MISSING | ||||
|         // to DATA_MISSING in anticipation of its data record | ||||
|         if (checksum == | ||||
|             (volume ^ _sector->logicalTrack ^ _sector->logicalSector)) | ||||
|             (volume ^ _sector->logicalCylinder ^ _sector->logicalSector)) | ||||
|             _sector->status = | ||||
|                 Sector::DATA_MISSING; /* unintuitive but correct */ | ||||
|  | ||||
|         if (_sector->logicalSide == 1) | ||||
|             _sector->logicalTrack -= _config.apple2().side_one_track_offset(); | ||||
|         if (_sector->logicalHead == 1) | ||||
|             _sector->logicalCylinder -= | ||||
|                 _config.apple2().side_one_track_offset(); | ||||
|  | ||||
|         /* Sanity check. */ | ||||
|  | ||||
|         if (_sector->logicalTrack > 100) | ||||
|         if (_sector->logicalCylinder > 100) | ||||
|         { | ||||
|             _sector->status = Sector::MISSING; | ||||
|             return; | ||||
|   | ||||
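
Note: the Apple II address field above is validated by XORing the volume, cylinder and sector values against the stored checksum, and side-1 cylinders are recorded with a configurable track offset that the decoder removes afterwards. A hedged restatement of that arithmetic (the struct, the function names and the offset value are placeholders, not the real `_config` plumbing):

```cpp
#include <cstdint>
#include <iostream>

// Placeholder for the decoded address field values.
struct Apple2Address
{
    unsigned volume;
    unsigned cylinder;
    unsigned head;
    unsigned sector;
};

static bool headerChecksumOk(const Apple2Address& a, unsigned storedChecksum)
{
    // The checksum covers volume, cylinder and sector (not the head).
    return storedChecksum == (a.volume ^ a.cylinder ^ a.sector);
}

static unsigned normaliseCylinder(const Apple2Address& a, unsigned sideOneTrackOffset)
{
    // Side 1 addresses carry a fixed track offset; remove it to get the
    // logical cylinder. sideOneTrackOffset stands in for the config value.
    return (a.head == 1) ? a.cylinder - sideOneTrackOffset : a.cylinder;
}

int main()
{
    Apple2Address a{254, 112, 1, 5};            // made-up example values
    unsigned checksum = a.volume ^ a.cylinder ^ a.sector;
    std::cout << headerChecksumOk(a, checksum) << ' '
              << normaliseCylinder(a, 80) << '\n'; // prints "1 32"
}
```
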
| @@ -36,7 +36,7 @@ private: | ||||
|     const Apple2EncoderProto& _config; | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
| @@ -129,8 +129,8 @@ private: | ||||
|             // extra padding. | ||||
|             write_ff40(sector.logicalSector == 0 ? 32 : 8); | ||||
|  | ||||
|             int track = sector.logicalTrack; | ||||
|             if (sector.logicalSide == 1) | ||||
|             int track = sector.logicalCylinder; | ||||
|             if (sector.logicalHead == 1) | ||||
|                 track += _config.side_one_track_offset(); | ||||
|  | ||||
|             // Write address field: APPLE2_SECTOR_RECORD + sector identifier + | ||||
|   | ||||
| @@ -75,14 +75,14 @@ public: | ||||
|         const auto& bytes = toBytes(rawbits).slice(0, 4); | ||||
|  | ||||
|         ByteReader br(bytes); | ||||
|         _sector->logicalTrack = decode_header_gcr(br.read_be16()); | ||||
|         _sector->logicalCylinder = decode_header_gcr(br.read_be16()); | ||||
|         _sector->logicalSector = decode_header_gcr(br.read_be16()); | ||||
|  | ||||
|         /* Sanity check the values read; there's no header checksum and | ||||
|          * occasionally we get garbage due to bit errors. */ | ||||
|         if (_sector->logicalSector > 11) | ||||
|             return; | ||||
|         if (_sector->logicalTrack > 79) | ||||
|         if (_sector->logicalCylinder > 79) | ||||
|             return; | ||||
|  | ||||
|         _sector->status = Sector::DATA_MISSING; | ||||
|   | ||||
| @@ -107,7 +107,7 @@ public: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
| @@ -127,7 +127,7 @@ public: | ||||
|             fillBitmapTo(bits, cursor, headerCursor, {true, false}); | ||||
|             write_sector_header(bits, | ||||
|                 cursor, | ||||
|                 sectorData->logicalTrack, | ||||
|                 sectorData->logicalCylinder, | ||||
|                 sectorData->logicalSector); | ||||
|             fillBitmapTo(bits, cursor, dataCursor, {true, false}); | ||||
|             write_sector_data(bits, cursor, sectorData->data); | ||||
|   | ||||
| @@ -74,8 +74,8 @@ public: | ||||
|  | ||||
|         uint8_t checksum = bytes[0]; | ||||
|         _sector->logicalSector = bytes[1]; | ||||
|         _sector->logicalSide = 0; | ||||
|         _sector->logicalTrack = bytes[2] - 1; | ||||
|         _sector->logicalHead = 0; | ||||
|         _sector->logicalCylinder = bytes[2] - 1; | ||||
|         if (checksum == xorBytes(bytes.slice(1, 4))) | ||||
|             _sector->status = | ||||
|                 Sector::DATA_MISSING; /* unintuitive but correct */ | ||||
|   | ||||
| @@ -155,7 +155,7 @@ public: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
| @@ -178,7 +178,7 @@ public: | ||||
|         else | ||||
|             _formatByte1 = _formatByte2 = 0; | ||||
|  | ||||
|         double clockRateUs = clockPeriodForC64Track(trackInfo->logicalTrack); | ||||
|         double clockRateUs = clockPeriodForC64Track(ltl.logicalCylinder); | ||||
|         int bitsPerRevolution = 200000.0 / clockRateUs; | ||||
|  | ||||
|         std::vector<bool> bits(bitsPerRevolution); | ||||
| @@ -245,7 +245,7 @@ private: | ||||
|              *   06-07 - $0F ("off" bytes) | ||||
|              */ | ||||
|             uint8_t encodedTrack = | ||||
|                 ((sector->logicalTrack) + | ||||
|                 ((sector->logicalCylinder) + | ||||
|                     1); // C64 track numbering starts with 1. Fluxengine with 0. | ||||
|             uint8_t encodedSector = sector->logicalSector; | ||||
|             // uint8_t formatByte1 = C64_FORMAT_ID_BYTE1; | ||||
|   | ||||
| @@ -76,8 +76,8 @@ public: | ||||
|         const auto& bytes = decode(readRawBits(6 * 10)); | ||||
|  | ||||
|         _sector->logicalSector = bytes[2]; | ||||
|         _sector->logicalSide = 0; | ||||
|         _sector->logicalTrack = bytes[0]; | ||||
|         _sector->logicalHead = 0; | ||||
|         _sector->logicalCylinder = bytes[0]; | ||||
|  | ||||
|         uint16_t wantChecksum = bytes.reader().seek(4).read_be16(); | ||||
|         uint16_t gotChecksum = crc16(CCITT_POLY, 0xef21, bytes.slice(0, 4)); | ||||
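
Note: this header check calls FluxEngine's `crc16()` helper with `CCITT_POLY` and a format-specific seed of 0xef21. For reference, a standalone bit-at-a-time CRC-16/CCITT sketch with a caller-supplied seed (this assumes the usual 0x1021 polynomial; the helper's real signature and implementation may differ):

```cpp
#include <cstddef>
#include <cstdint>

// Bit-at-a-time CRC-16, MSB first, polynomial 0x1021 (assumed), seeded with
// a caller-supplied initial value such as 0xef21.
static uint16_t crc16_ccitt(uint16_t crc, const uint8_t* data, size_t len)
{
    for (size_t i = 0; i < len; i++)
    {
        crc = static_cast<uint16_t>(crc ^ (data[i] << 8));
        for (int bit = 0; bit < 8; bit++)
            crc = (crc & 0x8000) ? static_cast<uint16_t>((crc << 1) ^ 0x1021)
                                 : static_cast<uint16_t>(crc << 1);
    }
    return crc;
}

int main()
{
    const uint8_t header[4] = {0x01, 0x00, 0x05, 0x00}; // made-up header bytes
    uint16_t got = crc16_ccitt(0xef21, header, sizeof(header));
    return got == 0 ? 0 : 1; // a real decoder compares against wantChecksum
}
```
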
|   | ||||
| @@ -126,8 +126,8 @@ public: | ||||
|             return; | ||||
|  | ||||
|         uint8_t abssector = id[2]; | ||||
|         _sector->logicalTrack = abssector >> 1; | ||||
|         _sector->logicalSide = 0; | ||||
|         _sector->logicalCylinder = abssector >> 1; | ||||
|         _sector->logicalHead = 0; | ||||
|         _sector->logicalSector = abssector & 1; | ||||
|         _sector->data.writer().append(id.slice(5, 12)).append(payload); | ||||
|  | ||||
|   | ||||
| @@ -141,11 +141,10 @@ public: | ||||
|         bw += decodeFmMfm(bits).slice(0, IBM_IDAM_LEN); | ||||
|  | ||||
|         IbmDecoderProto::TrackdataProto trackdata; | ||||
|         getTrackFormat( | ||||
|             trackdata, _sector->physicalTrack, _sector->physicalSide); | ||||
|         getTrackFormat(trackdata, _ltl->logicalCylinder, _ltl->logicalHead); | ||||
|  | ||||
|         _sector->logicalTrack = br.read_8(); | ||||
|         _sector->logicalSide = br.read_8(); | ||||
|         _sector->logicalCylinder = br.read_8(); | ||||
|         _sector->logicalHead = br.read_8(); | ||||
|         _sector->logicalSector = br.read_8(); | ||||
|         _currentSectorSize = 1 << (br.read_8() + 7); | ||||
|  | ||||
| @@ -156,11 +155,10 @@ public: | ||||
|                 Sector::DATA_MISSING; /* correct but unintuitive */ | ||||
|  | ||||
|         if (trackdata.ignore_side_byte()) | ||||
|             _sector->logicalSide = | ||||
|                 Layout::remapSidePhysicalToLogical(_sector->physicalSide); | ||||
|         _sector->logicalSide ^= trackdata.invert_side_byte(); | ||||
|             _sector->logicalHead = _ltl->logicalHead; | ||||
|         _sector->logicalHead ^= trackdata.invert_side_byte(); | ||||
|         if (trackdata.ignore_track_byte()) | ||||
|             _sector->logicalTrack = _sector->physicalTrack; | ||||
|             _sector->logicalCylinder = _ltl->logicalCylinder; | ||||
|  | ||||
|         for (int sector : trackdata.ignore_sector()) | ||||
|             if (_sector->logicalSector == sector) | ||||
| @@ -209,16 +207,14 @@ public: | ||||
|         _sector->status = | ||||
|             (wantCrc == gotCrc) ? Sector::OK : Sector::BAD_CHECKSUM; | ||||
|  | ||||
|         auto layout = Layout::getLayoutOfTrack( | ||||
|             _sector->logicalTrack, _sector->logicalSide); | ||||
|         if (_currentSectorSize != layout->sectorSize) | ||||
|         if (_currentSectorSize != _ltl->sectorSize) | ||||
|             std::cerr << fmt::format( | ||||
|                 "Warning: configured sector size for t{}.h{}.s{} is {} bytes " | ||||
|                 "but that seen on disk is {} bytes\n", | ||||
|                 _sector->logicalTrack, | ||||
|                 _sector->logicalSide, | ||||
|                 _sector->logicalCylinder, | ||||
|                 _sector->logicalHead, | ||||
|                 _sector->logicalSector, | ||||
|                 layout->sectorSize, | ||||
|                 _ltl->sectorSize, | ||||
|                 _currentSectorSize); | ||||
|     } | ||||
|  | ||||
|   | ||||
| @@ -107,16 +107,12 @@ private: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|         IbmEncoderProto::TrackdataProto trackdata; | ||||
|         getEncoderTrackData( | ||||
|             trackdata, trackInfo->logicalTrack, trackInfo->logicalSide); | ||||
|  | ||||
|         auto trackLayout = Layout::getLayoutOfTrack( | ||||
|             trackInfo->logicalTrack, trackInfo->logicalSide); | ||||
|         getEncoderTrackData(trackdata, ltl.logicalCylinder, ltl.logicalHead); | ||||
|  | ||||
|         auto writeBytes = [&](const Bytes& bytes) | ||||
|         { | ||||
| @@ -152,7 +148,7 @@ public: | ||||
|  | ||||
|         uint8_t sectorSize = 0; | ||||
|         { | ||||
|             int s = trackLayout->sectorSize >> 7; | ||||
|             int s = ltl.sectorSize >> 7; | ||||
|             while (s > 1) | ||||
|             { | ||||
|                 s >>= 1; | ||||
| @@ -202,9 +198,9 @@ public: | ||||
|                         bw.write_8(MFM_RECORD_SEPARATOR_BYTE); | ||||
|                 } | ||||
|                 bw.write_8(idamUnencoded); | ||||
|                 bw.write_8(sectorData->logicalTrack); | ||||
|                 bw.write_8(sectorData->logicalCylinder); | ||||
|                 bw.write_8( | ||||
|                     sectorData->logicalSide ^ trackdata.invert_side_byte()); | ||||
|                     sectorData->logicalHead ^ trackdata.invert_side_byte()); | ||||
|                 bw.write_8(sectorData->logicalSector); | ||||
|                 bw.write_8(sectorSize); | ||||
|                 uint16_t crc = crc16(CCITT_POLY, header); | ||||
| @@ -237,8 +233,7 @@ public: | ||||
|                 } | ||||
|                 bw.write_8(damUnencoded); | ||||
|  | ||||
|                 Bytes truncatedData = | ||||
|                     sectorData->data.slice(0, trackLayout->sectorSize); | ||||
|                 Bytes truncatedData = sectorData->data.slice(0, ltl.sectorSize); | ||||
|                 bw += truncatedData; | ||||
|                 uint16_t crc = crc16(CCITT_POLY, data); | ||||
|                 bw.write_be16(crc); | ||||
|   | ||||
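
Note: the size-code loop in the IBM encoder above turns the configured sector size into the IDAM's N byte, where the on-disk field means a sector of `128 << N` bytes (N=2 for 512-byte sectors). A compact standalone equivalent (the helper name is made up):

```cpp
#include <cassert>
#include <cstdint>

// The IBM IDAM carries a size code N meaning a sector of (128 << N) bytes.
// This mirrors the shift loop in the encoder hunk above.
static uint8_t ibmSizeCode(unsigned sectorSizeBytes)
{
    uint8_t n = 0;
    for (unsigned s = sectorSizeBytes >> 7; s > 1; s >>= 1)
        n++;
    return n;
}

int main()
{
    assert(ibmSizeCode(128) == 0);
    assert(ibmSizeCode(256) == 1);
    assert(ibmSizeCode(512) == 2);
    assert(ibmSizeCode(1024) == 3);
}
```
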
| @@ -5,6 +5,7 @@ | ||||
| #include "protocol.h" | ||||
| #include "lib/decoders/decoders.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/data/layout.h" | ||||
| #include "arch/macintosh/macintosh.h" | ||||
| #include "lib/core/bytes.h" | ||||
| #include "fmt/format.h" | ||||
| @@ -146,7 +147,7 @@ public: | ||||
|         auto header = toBytes(readRawBits(7 * 8)).slice(0, 7); | ||||
|  | ||||
|         uint8_t encodedTrack = decode_data_gcr(header[0]); | ||||
|         if (encodedTrack != (_sector->physicalTrack & 0x3f)) | ||||
|         if (encodedTrack != (_ltl->logicalCylinder & 0x3f)) | ||||
|             return; | ||||
|  | ||||
|         uint8_t encodedSector = decode_data_gcr(header[1]); | ||||
| @@ -157,8 +158,8 @@ public: | ||||
|         if (encodedSector > 11) | ||||
|             return; | ||||
|  | ||||
|         _sector->logicalTrack = _sector->physicalTrack; | ||||
|         _sector->logicalSide = decode_side(encodedSide); | ||||
|         _sector->logicalCylinder = _ltl->logicalCylinder; | ||||
|         _sector->logicalHead = decode_side(encodedSide); | ||||
|         _sector->logicalSector = encodedSector; | ||||
|         uint8_t gotsum = | ||||
|             (encodedTrack ^ encodedSector ^ encodedSide ^ formatByte) & 0x3f; | ||||
|   | ||||
| @@ -181,10 +181,10 @@ static void write_sector(std::vector<bool>& bits, | ||||
|         write_bits(bits, cursor, 0xff3fcff3fcffLL, 6 * 8); /* sync */ | ||||
|     write_bits(bits, cursor, MAC_SECTOR_RECORD, 3 * 8); | ||||
|  | ||||
|     uint8_t encodedTrack = sector->logicalTrack & 0x3f; | ||||
|     uint8_t encodedTrack = sector->logicalCylinder & 0x3f; | ||||
|     uint8_t encodedSector = sector->logicalSector; | ||||
|     uint8_t encodedSide = | ||||
|         encode_side(sector->logicalTrack, sector->logicalSide); | ||||
|         encode_side(sector->logicalCylinder, sector->logicalHead); | ||||
|     uint8_t formatByte = MAC_FORMAT_BYTE; | ||||
|     uint8_t headerChecksum = | ||||
|         (encodedTrack ^ encodedSector ^ encodedSide ^ formatByte) & 0x3f; | ||||
| @@ -220,11 +220,11 @@ public: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|         double clockRateUs = clockRateUsForTrack(trackInfo->logicalTrack); | ||||
|         double clockRateUs = clockRateUsForTrack(ltl.logicalCylinder); | ||||
|         int bitsPerRevolution = 200000.0 / clockRateUs; | ||||
|         std::vector<bool> bits(bitsPerRevolution); | ||||
|         unsigned cursor = 0; | ||||
|   | ||||
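
Note: both the Commodore and Macintosh encoder hunks size their output bitmap from a nominal 200 ms (200,000 µs) revolution divided by a per-track clock period, since both formats vary the bit clock with the track. A minimal sketch of that arithmetic (the clock value below is made up, not taken from either zone table):

```cpp
#include <cstdio>
#include <vector>

// One bit per clock cell over one revolution, as in the hunks above.
static int bitsPerRevolution(double revolutionUs, double clockRateUs)
{
    return static_cast<int>(revolutionUs / clockRateUs);
}

int main()
{
    double clockRateUs = 3.25; // made-up per-track clock period in microseconds
    std::vector<bool> bits(bitsPerRevolution(200000.0, clockRateUs));
    std::printf("%zu bits in one revolution\n", bits.size());
}
```
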
| @@ -4,6 +4,7 @@ | ||||
| #include "lib/data/fluxpattern.h" | ||||
| #include "lib/decoders/decoders.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/data/layout.h" | ||||
| #include "arch/micropolis/micropolis.h" | ||||
| #include "lib/core/bytes.h" | ||||
| #include "fmt/format.h" | ||||
| @@ -222,14 +223,14 @@ public: | ||||
|         if (syncByte != 0xFF) | ||||
|             return; | ||||
|  | ||||
|         _sector->logicalTrack = br.read_8(); | ||||
|         _sector->logicalSide = _sector->physicalSide; | ||||
|         _sector->logicalCylinder = br.read_8(); | ||||
|         _sector->logicalHead = _ltl->logicalHead; | ||||
|         _sector->logicalSector = br.read_8(); | ||||
|         if (_sector->logicalSector > 15) | ||||
|             return; | ||||
|         if (_sector->logicalTrack > 76) | ||||
|         if (_sector->logicalCylinder > 76) | ||||
|             return; | ||||
|         if (_sector->logicalTrack != _sector->physicalTrack) | ||||
|         if (_sector->logicalCylinder != _ltl->logicalCylinder) | ||||
|             return; | ||||
|  | ||||
|         br.read(10); /* OS data or padding */ | ||||
|   | ||||
| @@ -40,7 +40,7 @@ static void write_sector(std::vector<bool>& bits, | ||||
|     { | ||||
|         ByteWriter writer(sectorData); | ||||
|         writer.write_8(0xff); /* Sync */ | ||||
|         writer.write_8(sector->logicalTrack); | ||||
|         writer.write_8(sector->logicalCylinder); | ||||
|         writer.write_8(sector->logicalSector); | ||||
|         for (int i = 0; i < 10; i++) | ||||
|             writer.write_8(0); /* Padding */ | ||||
| @@ -87,7 +87,7 @@ public: | ||||
|     { | ||||
|     } | ||||
|  | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|   | ||||
| @@ -6,6 +6,7 @@ | ||||
| #include "lib/data/fluxmapreader.h" | ||||
| #include "lib/data/fluxpattern.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/data/layout.h" | ||||
| #include <string.h> | ||||
|  | ||||
| const int SECTOR_SIZE = 256; | ||||
| @@ -64,8 +65,8 @@ public: | ||||
|             gotChecksum += br.read_be16(); | ||||
|         uint16_t wantChecksum = br.read_be16(); | ||||
|  | ||||
|         _sector->logicalTrack = _sector->physicalTrack; | ||||
|         _sector->logicalSide = _sector->physicalSide; | ||||
|         _sector->logicalCylinder = _ltl->logicalCylinder; | ||||
|         _sector->logicalHead = _ltl->logicalHead; | ||||
|         _sector->logicalSector = _currentSector; | ||||
|         _sector->data = bytes.slice(0, SECTOR_SIZE).swab(); | ||||
|         _sector->status = | ||||
|   | ||||
| @@ -17,6 +17,7 @@ | ||||
| #include "lib/data/fluxpattern.h" | ||||
| #include "lib/decoders/decoders.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/data/layout.h" | ||||
| #include "arch/northstar/northstar.h" | ||||
| #include "lib/core/bytes.h" | ||||
| #include "lib/decoders/decoders.pb.h" | ||||
| @@ -159,9 +160,9 @@ public: | ||||
|         auto bytes = decodeFmMfm(rawbits).slice(0, recordSize); | ||||
|         ByteReader br(bytes); | ||||
|  | ||||
|         _sector->logicalSide = _sector->physicalSide; | ||||
|         _sector->logicalHead = _ltl->logicalHead; | ||||
|         _sector->logicalSector = _hardSectorId; | ||||
|         _sector->logicalTrack = _sector->physicalTrack; | ||||
|         _sector->logicalCylinder = _ltl->logicalCylinder; | ||||
|  | ||||
|         if (headerSize == NORTHSTAR_HEADER_SIZE_DD) | ||||
|         { | ||||
|   | ||||
| @@ -129,7 +129,7 @@ public: | ||||
|     { | ||||
|     } | ||||
|  | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|   | ||||
| @@ -5,6 +5,7 @@ | ||||
| #include "protocol.h" | ||||
| #include "lib/decoders/decoders.h" | ||||
| #include "lib/data/sector.h" | ||||
| #include "lib/data/layout.h" | ||||
| #include "arch/smaky6/smaky6.h" | ||||
| #include "lib/core/bytes.h" | ||||
| #include "lib/core/crc.h" | ||||
| @@ -129,11 +130,11 @@ public: | ||||
|         uint8_t wantedChecksum = br.read_8(); | ||||
|         uint8_t gotChecksum = sumBytes(data) & 0xff; | ||||
|  | ||||
|         if (track != _sector->physicalTrack) | ||||
|         if (track != _ltl->logicalCylinder) | ||||
|             return; | ||||
|  | ||||
|         _sector->logicalTrack = _sector->physicalTrack; | ||||
|         _sector->logicalSide = _sector->physicalSide; | ||||
|         _sector->logicalCylinder = _ltl->physicalCylinder; | ||||
|         _sector->logicalHead = _ltl->logicalHead; | ||||
|         _sector->logicalSector = _sectorId; | ||||
|  | ||||
|         _sector->data = data; | ||||
|   | ||||
| @@ -43,8 +43,8 @@ public: | ||||
|  | ||||
|         ByteReader br(bytes); | ||||
|         uint8_t track = br.read_8(); | ||||
|         _sector->logicalTrack = track >> 1; | ||||
|         _sector->logicalSide = track & 1; | ||||
|         _sector->logicalCylinder = track >> 1; | ||||
|         _sector->logicalHead = track & 1; | ||||
|         br.skip(1); /* seems always to be 1 */ | ||||
|         _sector->logicalSector = br.read_8(); | ||||
|         uint8_t wantChecksum = br.read_8(); | ||||
|   | ||||
| @@ -17,7 +17,7 @@ public: | ||||
|     { | ||||
|     } | ||||
|  | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
| @@ -83,7 +83,7 @@ private: | ||||
|             Bytes bytes; | ||||
|             ByteWriter bw(bytes); | ||||
|             bw.write_8( | ||||
|                 (sectorData->logicalTrack << 1) | sectorData->logicalSide); | ||||
|                 (sectorData->logicalCylinder << 1) | sectorData->logicalHead); | ||||
|             bw.write_8(1); | ||||
|             bw.write_8(sectorData->logicalSector); | ||||
|             bw.write_8(~sumBytes(bytes.slice(0, 3))); | ||||
|   | ||||
| @@ -64,8 +64,8 @@ public: | ||||
|         uint16_t gotChecksum = | ||||
|             crc16(CCITT_POLY, bytes.slice(1, TIDS990_SECTOR_RECORD_SIZE - 3)); | ||||
|  | ||||
|         _sector->logicalSide = br.read_8() >> 3; | ||||
|         _sector->logicalTrack = br.read_8(); | ||||
|         _sector->logicalHead = br.read_8() >> 3; | ||||
|         _sector->logicalCylinder = br.read_8(); | ||||
|         br.read_8(); /* number of sectors per track */ | ||||
|         _sector->logicalSector = br.read_8(); | ||||
|         br.read_be16(); /* sector size */ | ||||
|   | ||||
| @@ -59,7 +59,7 @@ private: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
| @@ -95,8 +95,8 @@ public: | ||||
|  | ||||
|                 writeBytes(12, 0x55); | ||||
|                 bw.write_8(am1Unencoded); | ||||
|                 bw.write_8(sectorData->logicalSide << 3); | ||||
|                 bw.write_8(sectorData->logicalTrack); | ||||
|                 bw.write_8(sectorData->logicalHead << 3); | ||||
|                 bw.write_8(sectorData->logicalCylinder); | ||||
|                 bw.write_8(_config.sector_count()); | ||||
|                 bw.write_8(sectorData->logicalSector); | ||||
|                 bw.write_be16(sectorData->data.size()); | ||||
|   | ||||
| @@ -80,11 +80,11 @@ public: | ||||
|         _sector->logicalSector = bytes[1]; | ||||
|         uint8_t gotChecksum = bytes[2]; | ||||
|  | ||||
|         _sector->logicalTrack = rawTrack & 0x7f; | ||||
|         _sector->logicalSide = rawTrack >> 7; | ||||
|         _sector->logicalCylinder = rawTrack & 0x7f; | ||||
|         _sector->logicalHead = rawTrack >> 7; | ||||
|         uint8_t wantChecksum = bytes[0] + bytes[1]; | ||||
|         if ((_sector->logicalSector > 20) || (_sector->logicalTrack > 85) || | ||||
|             (_sector->logicalSide > 1)) | ||||
|         if ((_sector->logicalSector > 20) || (_sector->logicalCylinder > 85) || | ||||
|             (_sector->logicalHead > 1)) | ||||
|             return; | ||||
|  | ||||
|         if (wantChecksum == gotChecksum) | ||||
|   | ||||
| @@ -112,7 +112,7 @@ static void write_sector(std::vector<bool>& bits, | ||||
|     write_one_bits(bits, cursor, trackdata.pre_header_sync_bits()); | ||||
|     write_bits(bits, cursor, VICTOR9K_SECTOR_RECORD, 10); | ||||
|  | ||||
|     uint8_t encodedTrack = sector.logicalTrack | (sector.logicalSide << 7); | ||||
|     uint8_t encodedTrack = sector.logicalCylinder | (sector.logicalHead << 7); | ||||
|     uint8_t encodedSector = sector.logicalSector; | ||||
|     write_bytes(bits, | ||||
|         cursor, | ||||
| @@ -164,13 +164,12 @@ private: | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl, | ||||
|         const std::vector<std::shared_ptr<const Sector>>& sectors, | ||||
|         const Image& image) override | ||||
|     { | ||||
|         Victor9kEncoderProto::TrackdataProto trackdata; | ||||
|         getTrackFormat( | ||||
|             trackdata, trackInfo->logicalTrack, trackInfo->logicalSide); | ||||
|         getTrackFormat(trackdata, ltl.logicalCylinder, ltl.logicalHead); | ||||
|  | ||||
|         unsigned bitsPerRevolution = (trackdata.rotational_period_ms() * 1e3) / | ||||
|                                      trackdata.clock_period_us(); | ||||
|   | ||||
| @@ -34,11 +34,11 @@ public: | ||||
|         ByteReader br(bytes); | ||||
|  | ||||
|         _sector->logicalSector = br.read_8() & 0x1f; | ||||
|         _sector->logicalSide = 0; | ||||
|         _sector->logicalTrack = br.read_8() & 0x7f; | ||||
|         _sector->logicalHead = 0; | ||||
|         _sector->logicalCylinder = br.read_8() & 0x7f; | ||||
|         if (_sector->logicalSector > 31) | ||||
|             return; | ||||
|         if (_sector->logicalTrack > 80) | ||||
|         if (_sector->logicalCylinder > 80) | ||||
|             return; | ||||
|  | ||||
|         _sector->data = br.read(132); | ||||
|   | ||||
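
Note: taken together, these hunks rename `Sector::logicalTrack`/`logicalSide` to `logicalCylinder`/`logicalHead` and change every `Encoder::encode()` override to receive a `LogicalTrackLayout` directly instead of a `shared_ptr<const TrackInfo>` plus a separate `Layout::getLayoutOfTrack()` lookup. A minimal sketch of the resulting shape, using stand-in type definitions rather than the real headers:

```cpp
#include <memory>
#include <vector>

// Stand-in types: the real definitions live in lib/data/ and carry more fields.
struct LogicalTrackLayout
{
    unsigned logicalCylinder;
    unsigned logicalHead;
    unsigned sectorSize;
};

struct Sector
{
    unsigned logicalCylinder; // was logicalTrack
    unsigned logicalHead;     // was logicalSide
    unsigned logicalSector;
};

struct Fluxmap {};
struct Image {};

class ExampleEncoder
{
public:
    // New-style signature: the per-track layout is passed in, so encoders no
    // longer look it up themselves.
    std::unique_ptr<Fluxmap> encode(const LogicalTrackLayout& ltl,
        const std::vector<std::shared_ptr<const Sector>>& sectors,
        const Image& image)
    {
        for (const auto& sector : sectors)
        {
            // Address fields are built from the renamed Sector members.
            unsigned idByte =
                (sector->logicalCylinder << 1) | sector->logicalHead;
            (void)idByte;
        }
        (void)ltl;
        (void)image;
        return std::make_unique<Fluxmap>();
    }
};

int main() {}
```
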
							
								
								
									
build.py (28 changed lines)

							| @@ -8,7 +8,7 @@ import config | ||||
| import re | ||||
|  | ||||
| # Hack for building on Fedora/WSL; executables get the .exe extension, | ||||
| # build the build system detects it as Linux. | ||||
| # but the build system detects it as Linux. | ||||
| import build.toolchain | ||||
|  | ||||
| toolchain.Toolchain.EXE = "$(EXT)" | ||||
| @@ -28,7 +28,7 @@ else: | ||||
|         ("acorndfs", "", "--200"), | ||||
|         ("agat", "", ""), | ||||
|         ("amiga", "", ""), | ||||
|         ("apple2", "", "--140 40track_drive"), | ||||
|         ("apple2", "", "--140 --drivetype=40"), | ||||
|         ("atarist", "", "--360"), | ||||
|         ("atarist", "", "--370"), | ||||
|         ("atarist", "", "--400"), | ||||
| @@ -38,17 +38,17 @@ else: | ||||
|         ("atarist", "", "--800"), | ||||
|         ("atarist", "", "--820"), | ||||
|         ("bk", "", ""), | ||||
|         ("brother", "", "--120 40track_drive"), | ||||
|         ("brother", "", "--120 --drivetype=40"), | ||||
|         ("brother", "", "--240"), | ||||
|         ( | ||||
|             "commodore", | ||||
|             "scripts/commodore1541_test.textpb", | ||||
|             "--171 40track_drive", | ||||
|             "--171 --drivetype=40", | ||||
|         ), | ||||
|         ( | ||||
|             "commodore", | ||||
|             "scripts/commodore1541_test.textpb", | ||||
|             "--192 40track_drive", | ||||
|             "--192 --drivetype=40", | ||||
|         ), | ||||
|         ("commodore", "", "--800"), | ||||
|         ("commodore", "", "--1620"), | ||||
| @@ -60,17 +60,17 @@ else: | ||||
|         ("ibm", "", "--1232"), | ||||
|         ("ibm", "", "--1440"), | ||||
|         ("ibm", "", "--1680"), | ||||
|         ("ibm", "", "--180 40track_drive"), | ||||
|         ("ibm", "", "--160 40track_drive"), | ||||
|         ("ibm", "", "--320 40track_drive"), | ||||
|         ("ibm", "", "--360 40track_drive"), | ||||
|         ("ibm", "", "--180 --drivetype=40"), | ||||
|         ("ibm", "", "--160 --drivetype=40"), | ||||
|         ("ibm", "", "--320 --drivetype=40"), | ||||
|         ("ibm", "", "--360 --drivetype=40"), | ||||
|         ("ibm", "", "--720_96"), | ||||
|         ("ibm", "", "--720_135"), | ||||
|         ("mac", "scripts/mac400_test.textpb", "--400"), | ||||
|         ("mac", "scripts/mac800_test.textpb", "--800"), | ||||
|         ("n88basic", "", ""), | ||||
|         ("rx50", "", ""), | ||||
|         ("tartu", "", "--390 40track_drive"), | ||||
|         ("tartu", "", "--390 --drivetype=40"), | ||||
|         ("tartu", "", "--780"), | ||||
|         ("tids990", "", ""), | ||||
|         ("victor9k", "", "--612"), | ||||
| @@ -93,7 +93,7 @@ else: | ||||
|                     + c[1] | ||||
|                     + "' '" | ||||
|                     + c[2] | ||||
|                     + "' $(dir $[outs[0]]) > /dev/null" | ||||
|                     + "' $[dirname(filenameof(outs[0]))] > /dev/null" | ||||
|                 ], | ||||
|                 label="CORPUSTEST", | ||||
|             ) | ||||
| @@ -104,15 +104,15 @@ export( | ||||
|     name="all", | ||||
|     items={ | ||||
|         "fluxengine$(EXT)": "src+fluxengine", | ||||
|         "fluxengine-gui$(EXT)": "src/gui", | ||||
|         "fluxengine-gui$(EXT)": "src/gui2", | ||||
|         "brother120tool$(EXT)": "tools+brother120tool", | ||||
|         "brother240tool$(EXT)": "tools+brother240tool", | ||||
|         "upgrade-flux-file$(EXT)": "tools+upgrade-flux-file", | ||||
|     } | ||||
|     | ( | ||||
|         { | ||||
|             "FluxEngine.pkg": "src/gui+fluxengine_pkg", | ||||
|             "FluxEngine.app.zip": "src/gui+fluxengine_app_zip", | ||||
|             "FluxEngine.pkg": "src/gui2+fluxengine_pkg", | ||||
|             "FluxEngine.app.zip": "src/gui2+fluxengine_app_zip", | ||||
|         } | ||||
|         if config.osx | ||||
|         else {} | ||||
|   | ||||
							
								
								
									
build/ab.mk (75 changed lines)

							| @@ -15,16 +15,17 @@ HOSTCC ?= gcc | ||||
| HOSTCXX ?= g++ | ||||
| HOSTAR ?= ar | ||||
| HOSTCFLAGS ?= -g -Og | ||||
| HOSTCXXFLAGS ?= $(HOSTCFLAGS) | ||||
| HOSTLDFLAGS ?= -g | ||||
|  | ||||
| CC ?= $(HOSTCC) | ||||
| CXX ?= $(HOSTCXX) | ||||
| AR ?= $(HOSTAR) | ||||
| CFLAGS ?= $(HOSTCFLAGS) | ||||
| CXXFLAGS ?= $(CFLAGS) | ||||
| LDFLAGS ?= $(HOSTLDFLAGS) | ||||
|  | ||||
| export PKG_CONFIG | ||||
| export HOST_PKG_CONFIG | ||||
| NINJA ?= ninja | ||||
|  | ||||
| ifdef VERBOSE | ||||
| 	hide = | ||||
| @@ -63,37 +64,36 @@ EXT ?= | ||||
|  | ||||
| CWD=$(shell pwd) | ||||
|  | ||||
| ifeq ($(AB_ENABLE_PROGRESS_INFO),true) | ||||
| 	ifeq ($(PROGRESSINFO),) | ||||
| 	# The first make invocation here has to have its output discarded or else it | ||||
| 	# produces spurious 'Leaving directory' messages... don't know why. | ||||
| 	rulecount := $(strip $(shell $(MAKE) --no-print-directory -q $(OBJ)/build.mk PROGRESSINFO=1 > /dev/null \ | ||||
| 		&& $(MAKE) --no-print-directory -n $(MAKECMDGOALS) PROGRESSINFO=XXXPROGRESSINFOXXX | grep XXXPROGRESSINFOXXX | wc -l)) | ||||
| 	ruleindex := 1 | ||||
| 	PROGRESSINFO = "[$(ruleindex)/$(rulecount)]$(eval ruleindex := $(shell expr $(ruleindex) + 1)) " | ||||
| 	endif | ||||
| else | ||||
| 	PROGRESSINFO = "" | ||||
| endif | ||||
| define newline | ||||
|  | ||||
| PKG_CONFIG_HASHES = $(OBJ)/.pkg-config-hashes/target-$(word 1, $(shell $(PKG_CONFIG) --list-all | md5sum)) | ||||
| HOST_PKG_CONFIG_HASHES = $(OBJ)/.pkg-config-hashes/host-$(word 1, $(shell $(HOST_PKG_CONFIG) --list-all | md5sum)) | ||||
|  | ||||
| $(OBJ)/build.mk : $(PKG_CONFIG_HASHES) $(HOST_PKG_CONFIG_HASHES) | ||||
| $(PKG_CONFIG_HASHES) $(HOST_PKG_CONFIG_HASHES) &: | ||||
| 	$(hide) rm -rf $(OBJ)/.pkg-config-hashes | ||||
| 	$(hide) mkdir -p $(OBJ)/.pkg-config-hashes | ||||
| 	$(hide) touch $(PKG_CONFIG_HASHES) $(HOST_PKG_CONFIG_HASHES) | ||||
| endef | ||||
|  | ||||
| include $(OBJ)/build.mk | ||||
| define check_for_command | ||||
|   $(shell command -v $1 >/dev/null || (echo "Required command '$1' missing" >&2 && kill $$PPID)) | ||||
| endef | ||||
|  | ||||
| ifeq ($(OSX),yes) | ||||
| 	MAKEFLAGS += -r -j$(shell sysctl -n hw.logicalcpu) | ||||
| else | ||||
| 	MAKEFLAGS += -r -j$(shell nproc) | ||||
| endif | ||||
| $(call check_for_command,ninja) | ||||
| $(call check_for_command,cmp) | ||||
| $(call check_for_command,$(PYTHON)) | ||||
|  | ||||
| .DELETE_ON_ERROR: | ||||
| pkg-config-hash = $(shell ($(PKG_CONFIG) --list-all && $(HOST_PKG_CONFIG) --list-all) | md5sum) | ||||
| build-files = $(shell find . -name .obj -prune -o \( -name 'build.py' -a -type f \) -print) $(wildcard build/*.py) $(wildcard config.py) | ||||
| build-file-timestamps = $(shell ls -l $(build-files) | md5sum) | ||||
|  | ||||
| # Wipe the build file (forcing a regeneration) if the make environment is different. | ||||
| # (Conveniently, this includes the pkg-config hash calculated above.) | ||||
|  | ||||
| ignored-variables = MAKE_RESTARTS .VARIABLES MAKECMDGOALS MAKEFLAGS MFLAGS PAGER _ \ | ||||
| 	DESKTOP_STARTUP_ID XAUTHORITY ICEAUTHORITY SSH_AUTH_SOCK SESSION_MANAGER \ | ||||
| 	INVOCATION_ID SYSTEMD_EXEC_PID MANAGER_PID SSH_AGENT_PID JOURNAL_STREAM \ | ||||
| 	GPG_TTY WINDOWID MANAGERPID MAKE_TERMOUT MAKE_TERMERR OLDPWD | ||||
| $(shell mkdir -p $(OBJ)) | ||||
| $(file >$(OBJ)/newvars.txt,$(foreach v,$(filter-out $(ignored-variables),$(.VARIABLES)),$(v)=$($(v))$(newline))) | ||||
| $(shell touch $(OBJ)/vars.txt) | ||||
| #$(shell diff -u $(OBJ)/vars.txt $(OBJ)/newvars.txt >&2) | ||||
| $(shell cmp -s $(OBJ)/newvars.txt $(OBJ)/vars.txt || (rm -f $(OBJ)/build.ninja && echo "Environment changed --- regenerating" >&2)) | ||||
| $(shell mv $(OBJ)/newvars.txt $(OBJ)/vars.txt) | ||||
|  | ||||
| .PHONY: update-ab | ||||
| update-ab: | ||||
| @@ -107,10 +107,19 @@ clean:: | ||||
| 	@echo CLEAN | ||||
| 	$(hide) rm -rf $(OBJ) | ||||
|  | ||||
| compile_commands.json: $(OBJ)/build.ninja | ||||
| 	+$(hide) $(NINJA) -f $(OBJ)/build.ninja -t compdb > $@ | ||||
|  | ||||
| export PYTHONHASHSEED = 1 | ||||
| build-files = $(shell find . -name 'build.py') $(wildcard build/*.py) $(wildcard config.py) | ||||
| $(OBJ)/build.mk: Makefile $(build-files) build/ab.mk | ||||
| $(OBJ)/build.ninja $(OBJ)/build.targets &: | ||||
| 	@echo "AB" | ||||
| 	@mkdir -p $(OBJ) | ||||
| 	$(hide) $(PYTHON) -X pycache_prefix=$(OBJ)/__pycache__ build/ab.py -o $@ build.py \ | ||||
| 		|| rm -f $@ | ||||
| 	$(hide) $(PYTHON) -X pycache_prefix=$(OBJ)/__pycache__ build/ab.py \ | ||||
| 		-o $(OBJ) build.py \ | ||||
| 		-v $(OBJ)/vars.txt \ | ||||
| 		|| (rm -f $@ && false) | ||||
|  | ||||
| include $(OBJ)/build.targets | ||||
| .PHONY: $(ninja-targets) | ||||
| .NOTPARALLEL: | ||||
| $(ninja-targets): $(OBJ)/build.ninja | ||||
| 	+$(hide) $(NINJA) -f $(OBJ)/build.ninja $@ | ||||
|   | ||||
							
								
								
									
build/ab.ninja (new file, 2 lines)
							| @@ -0,0 +1,2 @@ | ||||
| rule rule | ||||
|     command = $command | ||||
							
								
								
									
build/ab.py (274 changed lines)
							| @@ -1,36 +1,32 @@ | ||||
| from collections import namedtuple | ||||
| from copy import copy | ||||
| from importlib.machinery import SourceFileLoader, PathFinder, ModuleSpec | ||||
| from os.path import * | ||||
| from pathlib import Path | ||||
| from typing import Iterable | ||||
| import argparse | ||||
| import ast | ||||
| import builtins | ||||
| from copy import copy | ||||
| import functools | ||||
| import hashlib | ||||
| import importlib | ||||
| import importlib.util | ||||
| from importlib.machinery import ( | ||||
|     SourceFileLoader, | ||||
|     PathFinder, | ||||
|     ModuleSpec, | ||||
| ) | ||||
| import inspect | ||||
| import os | ||||
| import re | ||||
| import string | ||||
| import sys | ||||
| import hashlib | ||||
| import re | ||||
| import ast | ||||
| from collections import namedtuple | ||||
| import types | ||||
|  | ||||
| VERBOSE_MK_FILE = False | ||||
| VERBOSE_NINJA_FILE = False | ||||
|  | ||||
| verbose = False | ||||
| quiet = False | ||||
| cwdStack = [""] | ||||
| targets = {} | ||||
| unmaterialisedTargets = {}  # dict, not set, to get consistent ordering | ||||
| materialisingStack = [] | ||||
| defaultGlobals = {} | ||||
| globalId = 1 | ||||
| wordCache = {} | ||||
| outputTargets = set() | ||||
|  | ||||
| RE_FORMAT_SPEC = re.compile( | ||||
|     r"(?:(?P<fill>[\s\S])?(?P<align>[<>=^]))?" | ||||
| @@ -52,6 +48,15 @@ sys.path += ["."] | ||||
| old_import = builtins.__import__ | ||||
|  | ||||
|  | ||||
| class Environment(types.SimpleNamespace): | ||||
|     def setdefault(self, name, value): | ||||
|         if not hasattr(self, name): | ||||
|             setattr(self, name, value) | ||||
|  | ||||
|  | ||||
| G = Environment() | ||||
|  | ||||
|  | ||||
| class PathFinderImpl(PathFinder): | ||||
|     def find_spec(self, fullname, path, target=None): | ||||
|         # The second test here is needed for Python 3.9. | ||||
| @@ -102,27 +107,88 @@ def error(message): | ||||
|     raise ABException(message) | ||||
|  | ||||
|  | ||||
| def _undo_escaped_dollar(s, op): | ||||
|     return s.replace(f"$${op}", f"${op}") | ||||
|  | ||||
|  | ||||
| class BracketedFormatter(string.Formatter): | ||||
|     def parse(self, format_string): | ||||
|         while format_string: | ||||
|             left, *right = format_string.split("$[", 1) | ||||
|             if not right: | ||||
|                 yield (left, None, None, None) | ||||
|             m = re.search(f"(?:[^$]|^)()\\$\\[()", format_string) | ||||
|             if not m: | ||||
|                 yield ( | ||||
|                     _undo_escaped_dollar(format_string, "["), | ||||
|                     None, | ||||
|                     None, | ||||
|                     None, | ||||
|                 ) | ||||
|                 break | ||||
|             right = right[0] | ||||
|             left = format_string[: m.start(1)] | ||||
|             right = format_string[m.end(2) :] | ||||
|  | ||||
|             offset = len(right) + 1 | ||||
|             try: | ||||
|                 ast.parse(right) | ||||
|             except SyntaxError as e: | ||||
|                 if not str(e).startswith("unmatched ']'"): | ||||
|                 if not str(e).startswith(f"unmatched ']'"): | ||||
|                     raise e | ||||
|                 offset = e.offset | ||||
|  | ||||
|             expr = right[0 : offset - 1] | ||||
|             format_string = right[offset:] | ||||
|  | ||||
|             yield (left if left else None, expr, None, None) | ||||
|             yield ( | ||||
|                 _undo_escaped_dollar(left, "[") if left else None, | ||||
|                 expr, | ||||
|                 None, | ||||
|                 None, | ||||
|             ) | ||||
|  | ||||
|  | ||||
| class GlobalFormatter(string.Formatter): | ||||
|     def parse(self, format_string): | ||||
|         while format_string: | ||||
|             m = re.search(f"(?:[^$]|^)()\\$\\(([^)]*)\\)()", format_string) | ||||
|             if not m: | ||||
|                 yield ( | ||||
|                     format_string, | ||||
|                     None, | ||||
|                     None, | ||||
|                     None, | ||||
|                 ) | ||||
|                 break | ||||
|             left = format_string[: m.start(1)] | ||||
|             var = m[2] | ||||
|             format_string = format_string[m.end(3) :] | ||||
|  | ||||
|             yield ( | ||||
|                 left if left else None, | ||||
|                 var, | ||||
|                 None, | ||||
|                 None, | ||||
|             ) | ||||
|  | ||||
|     def get_field(self, name, a1, a2): | ||||
|         return ( | ||||
|             getattr(G, name), | ||||
|             False, | ||||
|         ) | ||||
|  | ||||
|     def format_field(self, value, format_spec): | ||||
|         if not value: | ||||
|             return "" | ||||
|         return str(value) | ||||
|  | ||||
|  | ||||
| globalFormatter = GlobalFormatter() | ||||
|  | ||||
|  | ||||
| def substituteGlobalVariables(value): | ||||
|     while True: | ||||
|         oldValue = value | ||||
|         value = globalFormatter.format(value) | ||||
|         if value == oldValue: | ||||
|             return _undo_escaped_dollar(value, "(") | ||||
|  | ||||
|  | ||||
| def Rule(func): | ||||
| @@ -187,12 +253,10 @@ def _isiterable(xs): | ||||
|  | ||||
| class Target: | ||||
|     def __init__(self, cwd, name): | ||||
|         if verbose: | ||||
|             print("rule('%s', cwd='%s'" % (name, cwd)) | ||||
|         self.name = name | ||||
|         self.localname = self.name.rsplit("+")[-1] | ||||
|         self.traits = set() | ||||
|         self.dir = join("$(OBJ)", name) | ||||
|         self.dir = join(G.OBJ, name) | ||||
|         self.ins = [] | ||||
|         self.outs = [] | ||||
|         self.deps = [] | ||||
| @@ -232,7 +296,8 @@ class Target: | ||||
|                     [selfi.templateexpand(f) for f in filenamesof(value)] | ||||
|                 ) | ||||
|  | ||||
|         return Formatter().format(s) | ||||
|         s = Formatter().format(s) | ||||
|         return substituteGlobalVariables(s) | ||||
|  | ||||
|     def materialise(self, replacing=False): | ||||
|         if self not in unmaterialisedTargets: | ||||
| @@ -341,10 +406,10 @@ def targetof(value, cwd=None): | ||||
|             elif value.startswith("./"): | ||||
|                 value = normpath(join(cwd, value)) | ||||
|         # Explicit directories are always raw files. | ||||
|         elif value.endswith("/"): | ||||
|         if value.endswith("/"): | ||||
|             return _filetarget(value, cwd) | ||||
|         # Anything starting with a variable expansion is always a raw file. | ||||
|         elif value.startswith("$"): | ||||
|         # Anything in .obj is a raw file. | ||||
|         elif value.startswith(outputdir) or value.startswith(G.OBJ): | ||||
|             return _filetarget(value, cwd) | ||||
|  | ||||
|         # If this is not a rule lookup... | ||||
| @@ -467,78 +532,75 @@ def emit(*args, into=None): | ||||
|     if into is not None: | ||||
|         into += [s] | ||||
|     else: | ||||
|         outputFp.write(s) | ||||
|         ninjaFp.write(s) | ||||
|  | ||||
|  | ||||
| def shell(*args): | ||||
|     s = "".join(args) + "\n" | ||||
|     shellFp.write(s) | ||||
|  | ||||
|  | ||||
| def emit_rule(self, ins, outs, cmds=[], label=None): | ||||
|     name = self.name | ||||
|     fins_list = filenamesof(ins) | ||||
|     fins = set(fins_list) | ||||
|     fouts = filenamesof(outs) | ||||
|     nonobjs = [f for f in fouts if not f.startswith("$(OBJ)")] | ||||
|     fins = [self.templateexpand(f) for f in set(filenamesof(ins))] | ||||
|     fouts = [self.templateexpand(f) for f in filenamesof(outs)] | ||||
|  | ||||
|     global outputTargets | ||||
|     outputTargets.update(fouts) | ||||
|     outputTargets.add(name) | ||||
|  | ||||
|     emit("") | ||||
|     if VERBOSE_MK_FILE: | ||||
|     if VERBOSE_NINJA_FILE: | ||||
|         for k, v in self.args.items(): | ||||
|             emit(f"# {k} = {v}") | ||||
|  | ||||
|     lines = [] | ||||
|     if nonobjs: | ||||
|         emit("clean::", into=lines) | ||||
|         emit("\t$(hide) rm -f", *nonobjs, into=lines) | ||||
|  | ||||
|     hashable = cmds + fins_list + fouts | ||||
|     hash = hashlib.sha1(bytes("\n".join(hashable), "utf-8")).hexdigest() | ||||
|     hashfile = join(self.dir, f"hash_{hash}") | ||||
|  | ||||
|     global globalId | ||||
|     emit(".PHONY:", name, into=lines) | ||||
|     if outs: | ||||
|         outsn = globalId | ||||
|         globalId = globalId + 1 | ||||
|         insn = globalId | ||||
|         globalId = globalId + 1 | ||||
|         os.makedirs(self.dir, exist_ok=True) | ||||
|         rule = [] | ||||
|  | ||||
|         emit(f"OUTS_{outsn}", "=", *fouts, into=lines) | ||||
|         emit(f"INS_{insn}", "=", *fins, into=lines) | ||||
|         emit(name, ":", f"$(OUTS_{outsn})", into=lines) | ||||
|         emit(hashfile, ":", into=lines) | ||||
|         emit(f"\t@mkdir -p {self.dir}", into=lines) | ||||
|         emit(f"\t@touch {hashfile}", into=lines) | ||||
|         emit( | ||||
|             f"$(OUTS_{outsn})", | ||||
|             "&:" if len(fouts) > 1 else ":", | ||||
|             f"$(INS_{insn})", | ||||
|             hashfile, | ||||
|             into=lines, | ||||
|         ) | ||||
|         if G.AB_SANDBOX == "yes": | ||||
|             sandbox = join(self.dir, "sandbox") | ||||
|             emit(f"rm -rf {sandbox}", into=rule) | ||||
|             emit( | ||||
|                 f"{G.PYTHON} build/_sandbox.py --link -s", sandbox, *fins, into=rule | ||||
|             ) | ||||
|             for c in cmds: | ||||
|                 emit(f"(cd {sandbox} &&", c, ")", into=rule) | ||||
|             emit( | ||||
|                 f"{G.PYTHON} build/_sandbox.py --export -s", | ||||
|                 sandbox, | ||||
|                 *fouts, | ||||
|                 into=rule, | ||||
|             ) | ||||
|         else: | ||||
|             for c in cmds: | ||||
|                 emit(c, into=rule) | ||||
|  | ||||
|         ruletext = "".join(rule) | ||||
|         if len(ruletext) > 7000: | ||||
|             rulehash = hashlib.sha1(ruletext.encode()).hexdigest() | ||||
|  | ||||
|             rulef = join(self.dir, f"rule-{rulehash}.sh") | ||||
|             with open(rulef, "wt") as fp: | ||||
|                 fp.write("set -e\n") | ||||
|                 fp.write(ruletext) | ||||
|  | ||||
|             emit("build", *fouts, ":rule", *fins) | ||||
|             emit(" command=sh", rulef) | ||||
|         else: | ||||
|             emit("build", *fouts, ":rule", *fins) | ||||
|             emit( | ||||
|                 " command=", | ||||
|                 "&&".join([s.strip() for s in rule]).replace("$", "$$"), | ||||
|             ) | ||||
|         if label: | ||||
|             emit("\t$(hide)", "$(ECHO) $(PROGRESSINFO)" + label, into=lines) | ||||
|             emit(" description=", label) | ||||
|         emit("build", name, ":phony", *fouts) | ||||
|  | ||||
|         sandbox = join(self.dir, "sandbox") | ||||
|         emit("\t$(hide)", f"rm -rf {sandbox}", into=lines) | ||||
|         emit( | ||||
|             "\t$(hide)", | ||||
|             "$(PYTHON) build/_sandbox.py --link -s", | ||||
|             sandbox, | ||||
|             f"$(INS_{insn})", | ||||
|             into=lines, | ||||
|         ) | ||||
|         for c in cmds: | ||||
|             emit(f"\t$(hide) cd {sandbox} && (", c, ")", into=lines) | ||||
|         emit( | ||||
|             "\t$(hide)", | ||||
|             "$(PYTHON) build/_sandbox.py --export -s", | ||||
|             sandbox, | ||||
|             f"$(OUTS_{outsn})", | ||||
|             into=lines, | ||||
|         ) | ||||
|     else: | ||||
|         assert len(cmds) == 0, "rules with no outputs cannot have commands" | ||||
|         emit(name, ":", *fins, into=lines) | ||||
|         emit("build", name, ":phony", *fins) | ||||
|  | ||||
|     outputFp.write("".join(lines)) | ||||
|     emit("") | ||||
|  | ||||
|  | ||||
| @@ -585,47 +647,66 @@ def export(self, name=None, items: TargetsMap = {}, deps: Targets = []): | ||||
|         dest = self.targetof(dest) | ||||
|         outs += [dest] | ||||
|  | ||||
|         destf = filenameof(dest) | ||||
|         destf = self.templateexpand(filenameof(dest)) | ||||
|         outputTargets.update([destf]) | ||||
|  | ||||
|         srcs = filenamesof([src]) | ||||
|         assert ( | ||||
|             len(srcs) == 1 | ||||
|         ), "a dependency of an exported file must have exactly one output file" | ||||
|         srcf = self.templateexpand(srcs[0]) | ||||
|  | ||||
|         subrule = simplerule( | ||||
|             name=f"{self.localname}/{destf}", | ||||
|             cwd=self.cwd, | ||||
|             ins=[srcs[0]], | ||||
|             outs=[destf], | ||||
|             commands=["$(CP) -H %s %s" % (srcs[0], destf)], | ||||
|             label="", | ||||
|             commands=["$(CP) -H %s %s" % (srcf, destf)], | ||||
|             label="EXPORT", | ||||
|         ) | ||||
|         subrule.materialise() | ||||
|  | ||||
|     self.ins = [] | ||||
|     self.outs = deps + outs | ||||
|     outputTargets.add(name) | ||||
|  | ||||
|     emit("") | ||||
|     emit(".PHONY:", name) | ||||
|     emit(name, ":", *filenamesof(outs + deps)) | ||||
|     emit( | ||||
|         "build", | ||||
|         name, | ||||
|         ":phony", | ||||
|         *[self.templateexpand(f) for f in filenamesof(outs + deps)], | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     parser = argparse.ArgumentParser() | ||||
|     parser.add_argument("-v", "--verbose", action="store_true") | ||||
|     parser.add_argument("-q", "--quiet", action="store_true") | ||||
|     parser.add_argument("-o", "--output") | ||||
|     parser.add_argument("-v", "--varfile") | ||||
|     parser.add_argument("-o", "--outputdir") | ||||
|     parser.add_argument("-D", "--define", action="append", default=[]) | ||||
|     parser.add_argument("files", nargs="+") | ||||
|     args = parser.parse_args() | ||||
|  | ||||
|     global verbose | ||||
|     verbose = args.verbose | ||||
|  | ||||
|     global quiet | ||||
|     quiet = args.quiet | ||||
|  | ||||
|     global outputFp | ||||
|     outputFp = open(args.output, "wt") | ||||
|     vardefs = args.define | ||||
|     if args.varfile: | ||||
|         with open(args.varfile, "rt") as fp: | ||||
|             vardefs = vardefs + list(fp) | ||||
|  | ||||
|     for line in vardefs: | ||||
|         if "=" in line: | ||||
|             name, value = line.split("=", 1) | ||||
|             G.setdefault(name.strip(), value.strip()) | ||||
|     G.setdefault("AB_SANDBOX", "yes") | ||||
|  | ||||
|     global ninjaFp, shellFp, outputdir | ||||
|     outputdir = args.outputdir | ||||
|     G.setdefault("OBJ", outputdir) | ||||
|     ninjaFp = open(outputdir + "/build.ninja", "wt") | ||||
|     ninjaFp.write(f"include build/ab.ninja\n") | ||||
|  | ||||
|     for k in ["Rule"]: | ||||
|         defaultGlobals[k] = globals()[k] | ||||
| @@ -640,7 +721,10 @@ def main(): | ||||
|     while unmaterialisedTargets: | ||||
|         t = next(iter(unmaterialisedTargets)) | ||||
|         t.materialise() | ||||
|     emit("AB_LOADED = 1\n") | ||||
|  | ||||
|     with open(outputdir + "/build.targets", "wt") as fp: | ||||
|         fp.write("ninja-targets =") | ||||
|         fp.write(substituteGlobalVariables(" ".join(outputTargets))) | ||||
|  | ||||
|  | ||||
| main() | ||||
|   | ||||
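For orientation only (not part of the diff): the Environment/G machinery introduced in build/ab.py above moves `$(VAR)` expansion out of make and into ab.py itself, expanding repeatedly until a fixpoint is reached and treating `$$(` as an escaped literal `$(`. The following is a simplified, self-contained sketch of that behaviour, not the real module:

```python
# Simplified sketch of the $(VAR) expansion that build/ab.py now performs
# itself; a re-implementation for illustration, not the real module.
import re
import types


class Environment(types.SimpleNamespace):
    def setdefault(self, name, value):
        if not hasattr(self, name):
            setattr(self, name, value)


G = Environment()
G.setdefault("CC", "gcc")
G.setdefault("OBJ", ".obj")


def substitute(s: str) -> str:
    # Expand $(NAME) from G until nothing changes; "$$(" escapes a literal "$(".
    pattern = re.compile(r"(?<!\$)\$\(([^)]*)\)")
    while True:
        expanded = pattern.sub(lambda m: str(getattr(G, m[1], "")), s)
        if expanded == s:
            return expanded.replace("$$(", "$(")
        s = expanded


print(substitute("$(CC) -c -o $(OBJ)/foo.o foo.c"))  # gcc -c -o .obj/foo.o foo.c
```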
							
								
								
									
build/c.py (53 changed lines)
							| @@ -7,23 +7,22 @@ from build.ab import ( | ||||
|     flatten, | ||||
|     simplerule, | ||||
|     emit, | ||||
|     G, | ||||
| ) | ||||
| from build.utils import filenamesmatchingof, stripext, collectattrs | ||||
| from build.utils import stripext, collectattrs | ||||
| from build.toolchain import Toolchain, HostToolchain | ||||
| from os.path import * | ||||
|  | ||||
| emit( | ||||
|     """ | ||||
| ifeq ($(OSX),no) | ||||
| STARTGROUP ?= -Wl,--start-group | ||||
| ENDGROUP ?= -Wl,--end-group | ||||
| endif | ||||
| """ | ||||
| ) | ||||
| if G.OSX != "yes": | ||||
|     G.STARTGROUP = "-Wl,--start-group" | ||||
|     G.ENDGROUP = "-Wl,--end-group" | ||||
| else: | ||||
|     G.STARTGROUP = "" | ||||
|     G.ENDGROUP = "" | ||||
|  | ||||
| Toolchain.CC = ["$(CC) -c -o $[outs[0]] $[ins[0]] $(CFLAGS) $[cflags]"] | ||||
| Toolchain.CPP = ["$(CC) -E -P -o $[outs] $[cflags] -x c $[ins]"] | ||||
| Toolchain.CXX = ["$(CXX) -c -o $[outs[0]] $[ins[0]] $(CFLAGS) $[cflags]"] | ||||
| Toolchain.CXX = ["$(CXX) -c -o $[outs[0]] $[ins[0]] $(CXXFLAGS) $[cflags]"] | ||||
| Toolchain.AR = ["$(AR) cqs $[outs[0]] $[ins]"] | ||||
| Toolchain.ARXX = ["$(AR) cqs $[outs[0]] $[ins]"] | ||||
| Toolchain.CLINK = [ | ||||
| @@ -70,13 +69,9 @@ def _toolchain_find_header_targets(deps, initial=[]): | ||||
| Toolchain.find_c_header_targets = _toolchain_find_header_targets | ||||
|  | ||||
|  | ||||
| HostToolchain.CC = [ | ||||
|     "$(HOSTCC) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]" | ||||
| ] | ||||
| HostToolchain.CC = ["$(HOSTCC) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]"] | ||||
| HostToolchain.CPP = ["$(HOSTCC) -E -P -o $[outs] $[cflags] -x c $[ins]"] | ||||
| HostToolchain.CXX = [ | ||||
|     "$(HOSTCXX) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]" | ||||
| ] | ||||
| HostToolchain.CXX = ["$(HOSTCXX) -c -o $[outs[0]] $[ins[0]] $(HOSTCFLAGS) $[cflags]"] | ||||
| HostToolchain.AR = ["$(HOSTAR) cqs $[outs[0]] $[ins]"] | ||||
| HostToolchain.ARXX = ["$(HOSTAR) cqs $[outs[0]] $[ins]"] | ||||
| HostToolchain.CLINK = [ | ||||
| @@ -102,9 +97,7 @@ def _indirect(deps, name): | ||||
|     return r | ||||
|  | ||||
|  | ||||
| def cfileimpl( | ||||
|     self, name, srcs, deps, suffix, commands, label, toolchain, cflags | ||||
| ): | ||||
| def cfileimpl(self, name, srcs, deps, suffix, commands, label, toolchain, cflags): | ||||
|     outleaf = "=" + stripext(basename(filenameof(srcs[0]))) + suffix | ||||
|  | ||||
|     hdr_deps = toolchain.find_c_header_targets(deps) | ||||
| @@ -114,9 +107,7 @@ def cfileimpl( | ||||
|         if ("cheader_deps" not in d.args) and ("clibrary_deps" not in d.args) | ||||
|     ] | ||||
|     hdr_files = collectattrs(targets=hdr_deps, name="cheader_files") | ||||
|     cflags = collectattrs( | ||||
|         targets=hdr_deps, name="caller_cflags", initial=cflags | ||||
|     ) | ||||
|     cflags = collectattrs(targets=hdr_deps, name="caller_cflags", initial=cflags) | ||||
|  | ||||
|     t = simplerule( | ||||
|         replaces=self, | ||||
| @@ -194,7 +185,7 @@ def findsources(self, srcs, deps, cflags, filerule, toolchain, cwd): | ||||
|     for s in flatten(srcs): | ||||
|         objs += [ | ||||
|             filerule( | ||||
|                 name=join(self.localname, _removeprefix(f, "$(OBJ)/")), | ||||
|                 name=join(self.localname, _removeprefix(f, G.OBJ + "/")), | ||||
|                 srcs=[f], | ||||
|                 deps=deps, | ||||
|                 cflags=sorted(set(cflags)), | ||||
| @@ -239,9 +230,7 @@ def libraryimpl( | ||||
|         i = 0 | ||||
|         for dest, src in hdrs.items(): | ||||
|             s = filenamesof([src]) | ||||
|             assert ( | ||||
|                 len(s) == 1 | ||||
|             ), "the target of a header must return exactly one file" | ||||
|             assert len(s) == 1, "the target of a header must return exactly one file" | ||||
|  | ||||
|             cs += [f"$(CP) $[ins[{i}]] $[outs[{i}]]"] | ||||
|             outs += ["=" + dest] | ||||
| @@ -431,15 +420,11 @@ def programimpl( | ||||
|     label, | ||||
|     filerule, | ||||
| ): | ||||
|     cfiles = findsources( | ||||
|         self, srcs, deps, cflags, filerule, toolchain, self.cwd | ||||
|     ) | ||||
|     cfiles = findsources(self, srcs, deps, cflags, filerule, toolchain, self.cwd) | ||||
|  | ||||
|     lib_deps = toolchain.find_c_library_targets(deps) | ||||
|     libs = collectattrs(targets=lib_deps, name="clibrary_files") | ||||
|     ldflags = collectattrs( | ||||
|         targets=lib_deps, name="caller_ldflags", initial=ldflags | ||||
|     ) | ||||
|     ldflags = collectattrs(targets=lib_deps, name="caller_ldflags", initial=ldflags) | ||||
|  | ||||
|     simplerule( | ||||
|         replaces=self, | ||||
| @@ -558,9 +543,7 @@ def hostcxxprogram( | ||||
|  | ||||
| def _cppfileimpl(self, name, srcs, deps, cflags, toolchain): | ||||
|     hdr_deps = _indirect(deps, "cheader_deps") | ||||
|     cflags = collectattrs( | ||||
|         targets=hdr_deps, name="caller_cflags", initial=cflags | ||||
|     ) | ||||
|     cflags = collectattrs(targets=hdr_deps, name="caller_cflags", initial=cflags) | ||||
|  | ||||
|     simplerule( | ||||
|         replaces=self, | ||||
|   | ||||
							
								
								
									
build/pkg.py (10 changed lines)
							| @@ -1,4 +1,4 @@ | ||||
| from build.ab import Rule, Target | ||||
| from build.ab import Rule, Target, G | ||||
| import os | ||||
| import subprocess | ||||
|  | ||||
| @@ -31,8 +31,8 @@ class _PkgConfig: | ||||
|         return self.package_properties[p] | ||||
|  | ||||
|  | ||||
| TargetPkgConfig = _PkgConfig(os.getenv("PKG_CONFIG")) | ||||
| HostPkgConfig = _PkgConfig(os.getenv("HOST_PKG_CONFIG")) | ||||
| TargetPkgConfig = _PkgConfig(G.PKG_CONFIG) | ||||
| HostPkgConfig = _PkgConfig(G.HOST_PKG_CONFIG) | ||||
|  | ||||
|  | ||||
| def _package(self, name, package, fallback, pkgconfig): | ||||
| @@ -49,9 +49,7 @@ def _package(self, name, package, fallback, pkgconfig): | ||||
|         self.traits.update({"clibrary", "cxxlibrary"}) | ||||
|         return | ||||
|  | ||||
|     assert ( | ||||
|         fallback | ||||
|     ), f"Required package '{package}' not installed when materialising target '$[name]'" | ||||
|     assert fallback, f"Required package '{package}' not installed" | ||||
|  | ||||
|     if "cheader_deps" in fallback.args: | ||||
|         self.args["cheader_deps"] = fallback.args["cheader_deps"] | ||||
|   | ||||
| @@ -1,18 +1,16 @@ | ||||
| from build.ab import Rule, Targets, emit, simplerule, filenamesof | ||||
| from build.ab import Rule, Targets, emit, simplerule, filenamesof, G | ||||
| from build.utils import filenamesmatchingof, collectattrs | ||||
| from os.path import join, abspath, dirname, relpath | ||||
| from build.pkg import has_package | ||||
|  | ||||
| emit( | ||||
|     """ | ||||
| PROTOC ?= protoc | ||||
| HOSTPROTOC ?= protoc | ||||
| """ | ||||
| ) | ||||
| G.setdefault("PROTOC", "protoc") | ||||
| G.setdefault("PROTOC_SEPARATOR", ":") | ||||
| G.setdefault("HOSTPROTOC", "hostprotoc") | ||||
|  | ||||
| assert has_package("protobuf"), "required package 'protobuf' not installed" | ||||
|  | ||||
|  | ||||
|  | ||||
| def _getprotodeps(deps): | ||||
|     r = set() | ||||
|     for d in deps: | ||||
| @@ -23,7 +21,7 @@ def _getprotodeps(deps): | ||||
| @Rule | ||||
| def proto(self, name, srcs: Targets = [], deps: Targets = []): | ||||
|     protodeps = _getprotodeps(deps) | ||||
|     descriptorlist = ":".join( | ||||
|     descriptorlist = (G.PROTOC_SEPARATOR).join( | ||||
|         [ | ||||
|             relpath(f, start=self.dir) | ||||
|             for f in filenamesmatchingof(protodeps, "*.descriptor") | ||||
| @@ -50,7 +48,7 @@ def proto(self, name, srcs: Targets = [], deps: Targets = []): | ||||
|                             f"--descriptor_set_out={self.localname}.descriptor", | ||||
|                         ] | ||||
|                         + ( | ||||
|                             [f"--descriptor_set_in={descriptorlist}"] | ||||
|                             [f"--descriptor_set_in='{descriptorlist}'"] | ||||
|                             if descriptorlist | ||||
|                             else [] | ||||
|                         ) | ||||
| @@ -93,7 +91,7 @@ def protocc(self, name, srcs: Targets = [], deps: Targets = []): | ||||
|         outs += ["=" + cc, "=" + h] | ||||
|  | ||||
|     protodeps = _getprotodeps(deps + srcs) | ||||
|     descriptorlist = ":".join( | ||||
|     descriptorlist = G.PROTOC_SEPARATOR.join( | ||||
|         [ | ||||
|             relpath(f, start=self.dir) | ||||
|             for f in filenamesmatchingof(protodeps, "*.descriptor") | ||||
| @@ -114,7 +112,7 @@ def protocc(self, name, srcs: Targets = [], deps: Targets = []): | ||||
|                         "$(PROTOC)", | ||||
|                         "--proto_path=.", | ||||
|                         "--cpp_out=.", | ||||
|                         f"--descriptor_set_in={descriptorlist}", | ||||
|                         f"--descriptor_set_in='{descriptorlist}'", | ||||
|                     ] | ||||
|                     + protos | ||||
|                 ) | ||||
|   | ||||
| @@ -7,10 +7,13 @@ from build.ab import ( | ||||
|     cwdStack, | ||||
|     error, | ||||
|     simplerule, | ||||
|     G | ||||
| ) | ||||
| from os.path import relpath, splitext, join, basename, isfile | ||||
| from glob import iglob | ||||
| import fnmatch | ||||
| import subprocess | ||||
| import shutil | ||||
|  | ||||
|  | ||||
| def filenamesmatchingof(xs, pattern): | ||||
| @@ -51,6 +54,16 @@ def itemsof(pattern, root=None, cwd=None): | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def does_command_exist(cmd): | ||||
|     basecmd = cmd.strip().split()[0] | ||||
|     return shutil.which(basecmd) | ||||
|  | ||||
|  | ||||
| def shell(cmd): | ||||
|     r = subprocess.check_output([G.SHELL, "-c", cmd]) | ||||
|     return r.decode("utf-8").strip() | ||||
|  | ||||
|  | ||||
| @Rule | ||||
| def objectify(self, name, src: Target, symbol): | ||||
|     simplerule( | ||||
|   | ||||
| @@ -7,9 +7,7 @@ from build.ab import ( | ||||
|  | ||||
|  | ||||
| @Rule | ||||
| def zip( | ||||
|     self, name, flags="", items: TargetsMap = {}, extension="zip", label="ZIP" | ||||
| ): | ||||
| def zip(self, name, flags="", items: TargetsMap = {}, extension="zip", label="ZIP"): | ||||
|     cs = ["$(PYTHON) build/_zip.py -z $[outs]"] | ||||
|  | ||||
|     ins = [] | ||||
|   | ||||
| @@ -35,7 +35,7 @@ clibrary( | ||||
|         "./config.h", | ||||
|         "./src/adflib.h", | ||||
|     ], | ||||
|     cflags=["-Idep/adflib", "-Idep/adflib/src"], | ||||
|     cflags=["-Wno-stringop-overflow"], | ||||
|     hdrs={ | ||||
|         "adf_blk.h": "./src/adf_blk.h", | ||||
|         "adf_defs.h": "./src/adf_defs.h", | ||||
|   | ||||
							
								
								
									
Submodule dep/cli11 added at 89dc726939
Submodule dep/imgui added at 4d216d4510
Submodule dep/imhex added at a76eae2c11
Submodule dep/libromfs added at fa444f2995
Submodule dep/libwolv added at 56f77945fe
Submodule dep/lunasvg added at 83c58df810
Submodule dep/md4c added at 481fbfbdf7
Submodule dep/native-file-dialog added at 6db343ad34
Submodule dep/nlohmann_json added at 44bee1b138
Submodule dep/pattern-language added at f97999d4da
Submodule dep/throwing_ptr added at cd28490ebf
Submodule dep/xdgpp added at f01f810714
							| @@ -204,18 +204,18 @@ install some support packages. | ||||
|   - For Linux with Ubuntu/Debian: | ||||
| 	`libusb-1.0-0-dev`, `libsqlite3-dev`, `zlib1g-dev`, | ||||
| 	`libudev-dev`, `protobuf-compiler`, `libwxgtk3.0-gtk3-dev`, | ||||
| 	`libfmt-dev`, `python3`. | ||||
| 	`libfmt-dev`, `python3`, `ninja-build`. | ||||
|   - For Linux with Fedora/Red Hat: | ||||
|     `git`, `make`, `gcc`, `gcc-c++`, `xxd`, `protobuf-compiler`, | ||||
|     `protobuf-devel`, `fmt-devel`, `systemd-devel`, `wxGTK3-devel`, | ||||
|     `libsqlite3x-devel` | ||||
|     `libsqlite3x-devel`, `ninja-build` | ||||
|   - For OSX with Homebrew: `libusb`, `pkg-config`, `sqlite`, | ||||
|     `protobuf`, `truncate`, `wxwidgets`, `fmt`. | ||||
|     `protobuf`, `truncate`, `wxwidgets`, `fmt`, `ninja`. | ||||
|   - For Windows with WSL: `protobuf-c-compiler` `protobuf-devel` `fmt-devel` | ||||
|   `systemd-devel` `sqlite-devel` `wxGTK-devel` `mingw32-gcc` `mingw32-gcc-c++` | ||||
|   `mingw32-zlib-static` `mingw32-protobuf-static` `mingw32-sqlite-static` | ||||
|   `mingw32-wxWidgets3-static` `mingw32-libpng-static` `mingw32-libjpeg-static` | ||||
|   `mingw32-libtiff-static` `mingw32-nsis png2ico` | ||||
|   `mingw32-libtiff-static` `mingw32-nsis png2ico` `ninja-build` | ||||
|  | ||||
| These lists are not necessarily exhaustive --- please [get in | ||||
| touch](https://github.com/davidgiven/fluxengine/issues/new) if I've missed | ||||
|   | ||||
| @@ -1,15 +0,0 @@ | ||||
| 40track_drive | ||||
| ==== | ||||
| ## Adjust configuration for a 40-track drive | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to read from 40-track, 48tpi 5.25" drives. You have to tell it because there is | ||||
| no way to detect this automatically. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read ibm --180 40track_drive | ||||
| ``` | ||||
|  | ||||
| @@ -31,9 +31,9 @@ they might require nudging as the side order can't be reliably autodetected. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read acornadfs --160 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --320 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --640 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --800 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read acornadfs --1600 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --160 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --320 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --640 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --800 -s drive:0 -o acornadfs.img` | ||||
|   - `fluxengine read -c acornadfs --1600 -s drive:0 -o acornadfs.img` | ||||
|  | ||||
|   | ||||
| @@ -24,13 +24,13 @@ requires a bit of fiddling as they have the same tracks on twice. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read acorndfs --100 -s drive:0 -o acorndfs.img` | ||||
|   - `fluxengine read acorndfs --200 -s drive:0 -o acorndfs.img` | ||||
|   - `fluxengine read -c acorndfs --100 -s drive:0 -o acorndfs.img` | ||||
|   - `fluxengine read -c acorndfs --200 -s drive:0 -o acorndfs.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write acorndfs --100 -d drive:0 -i acorndfs.img` | ||||
|   - `fluxengine write acorndfs --200 -d drive:0 -i acorndfs.img` | ||||
|   - `fluxengine write -c acorndfs --100 -d drive:0 -i acorndfs.img` | ||||
|   - `fluxengine write -c acorndfs --200 -d drive:0 -i acorndfs.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -37,7 +37,7 @@ based on what looks right. If anyone knows _anything_ about these disks, | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read aeslanier -s drive:0 -o aeslanier.img` | ||||
|   - `fluxengine read -c aeslanier -s drive:0 -o aeslanier.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -20,11 +20,11 @@ profile. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read agat -s drive:0 -o agat.img` | ||||
|   - `fluxengine read -c agat -s drive:0 -o agat.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write agat -d drive:0 -i agat.img` | ||||
|   - `fluxengine write -c agat -d drive:0 -i agat.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -26,11 +26,11 @@ distinctly subpar and not particularly good at detecting errors. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read amiga -s drive:0 -o amiga.adf` | ||||
|   - `fluxengine read -c amiga -s drive:0 -o amiga.adf` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write amiga -d drive:0 -i amiga.adf` | ||||
|   - `fluxengine write -c amiga -d drive:0 -i amiga.adf` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -43,8 +43,8 @@ kayinfo.lbr | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read ampro --400 -s drive:0 -o ampro.img` | ||||
|   - `fluxengine read ampro --800 -s drive:0 -o ampro.img` | ||||
|   - `fluxengine read -c ampro --400 -s drive:0 -o ampro.img` | ||||
|   - `fluxengine read -c ampro --800 -s drive:0 -o ampro.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -58,13 +58,13 @@ volume. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read apple2 --140 -s drive:0 -o apple2.img` | ||||
|   - `fluxengine read apple2 --640 -s drive:0 -o apple2.img` | ||||
|   - `fluxengine read -c apple2 --140 -s drive:0 -o apple2.img` | ||||
|   - `fluxengine read -c apple2 --640 -s drive:0 -o apple2.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write apple2 --140 -d drive:0 -i apple2.img` | ||||
|   - `fluxengine write apple2 --640 -d drive:0 -i apple2.img` | ||||
|   - `fluxengine write -c apple2 --140 -d drive:0 -i apple2.img` | ||||
|   - `fluxengine write -c apple2 --640 -d drive:0 -i apple2.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -1,16 +0,0 @@ | ||||
| apple2_drive | ||||
| ==== | ||||
| ## Adjust configuration for a 40-track Apple II drive | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to adjust the pinout and track spacing to work with an Apple II | ||||
| drive.  This only works on Greaseweazle hardware and requires a custom | ||||
| connector. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read apple2 --160 apple2_drive | ||||
| ``` | ||||
|  | ||||
| @@ -29,25 +29,25 @@ Be aware that many PC drives (including mine) won't do the 82 track formats. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read atarist --360 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --370 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --400 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --410 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --720 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --740 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --800 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read atarist --820 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --360 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --370 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --400 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --410 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --720 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --740 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --800 -s drive:0 -o atarist.img` | ||||
|   - `fluxengine read -c atarist --820 -s drive:0 -o atarist.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write atarist --360 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --370 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --400 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --410 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --720 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --740 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --800 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write atarist --820 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --360 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --370 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --400 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --410 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --720 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --740 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --800 -d drive:0 -i atarist.img` | ||||
|   - `fluxengine write -c atarist --820 -d drive:0 -i atarist.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -22,9 +22,9 @@ on what was available at the time, with the same format on both. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read bk -s drive:0 -o bk800.img` | ||||
|   - `fluxengine read -c bk -s drive:0 -o bk800.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write bk -d drive:0 -i bk800.img` | ||||
|   - `fluxengine write -c bk -d drive:0 -i bk800.img` | ||||
|  | ||||
|   | ||||
| @@ -44,13 +44,13 @@ investigate. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read brother --120 -s drive:0 -o brother.img` | ||||
|   - `fluxengine read brother --240 -s drive:0 -o brother.img` | ||||
|   - `fluxengine read -c brother --120 -s drive:0 -o brother.img` | ||||
|   - `fluxengine read -c brother --240 -s drive:0 -o brother.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write brother --120 -d drive:0 -i brother.img` | ||||
|   - `fluxengine write brother --240 -d drive:0 -i brother.img` | ||||
|   - `fluxengine write -c brother --120 -d drive:0 -i brother.img` | ||||
|   - `fluxengine write -c brother --240 -d drive:0 -i brother.img` | ||||
|  | ||||
| Dealing with misaligned disks | ||||
| ----------------------------- | ||||
|   | ||||
| @@ -54,18 +54,18 @@ A CMD FD2000 disk (a popular third-party Commodore disk drive) | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read commodore --171 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --192 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --800 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --1042 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read commodore --1620 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --171 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --192 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --800 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --1042 -s drive:0 -o commodore.d64` | ||||
|   - `fluxengine read -c commodore --1620 -s drive:0 -o commodore.d64` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write commodore --171 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write commodore --192 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write commodore --800 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write commodore --1620 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --171 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --192 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --800 -d drive:0 -i commodore.d64` | ||||
|   - `fluxengine write -c commodore --1620 -d drive:0 -i commodore.d64` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -33,7 +33,7 @@ images. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read eco1 -s drive:0 -o eco1.img` | ||||
|   - `fluxengine read -c eco1 -s drive:0 -o eco1.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -15,5 +15,5 @@ format itself is yet another IBM scheme variant. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read epsonpf10 -s drive:0 -o epsonpf10.img` | ||||
|   - `fluxengine read -c epsonpf10 -s drive:0 -o epsonpf10.img` | ||||
|  | ||||
|   | ||||
| @@ -36,7 +36,7 @@ touch](https://github.com/davidgiven/fluxengine/issues/new). | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read f85 -s drive:0 -o f85.img` | ||||
|   - `fluxengine read -c f85 -s drive:0 -o f85.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -30,7 +30,7 @@ I don't have access to one of those disks. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read fb100 -s drive:0 -o fb100.img` | ||||
|   - `fluxengine read -c fb100 -s drive:0 -o fb100.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -23,17 +23,17 @@ encoding scheme. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read hplif --264 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read hplif --608 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read hplif --616 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read hplif --770 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --264 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --608 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --616 -s drive:0 -o hplif.img` | ||||
|   - `fluxengine read -c hplif --770 -s drive:0 -o hplif.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write hplif --264 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write hplif --608 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write hplif --616 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write hplif --770 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --264 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --608 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --616 -d drive:0 -i hplif.img` | ||||
|   - `fluxengine write -c hplif --770 -d drive:0 -i hplif.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -55,30 +55,30 @@ image format. FluxEngine will use these parameters. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read ibm --auto -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --160 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --180 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --320 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --360 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --720_96 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --720_135 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1200 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1232 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1440 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read ibm --1680 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --auto -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --160 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --180 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --320 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --360 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --720_96 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --720_135 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1200 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1232 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1440 -s drive:0 -o ibm.img` | ||||
|   - `fluxengine read -c ibm --1680 -s drive:0 -o ibm.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write ibm --160 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --180 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --320 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --360 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --720_96 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --720_135 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1200 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1232 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1440 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write ibm --1680 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --160 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --180 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --320 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --360 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --720_96 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --720_135 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1200 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1232 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1440 -d drive:0 -i ibm.img` | ||||
|   - `fluxengine write -c ibm --1680 -d drive:0 -i ibm.img` | ||||
|  | ||||
| Mixed-format disks | ||||
| ------------------ | ||||
|   | ||||
| @@ -15,5 +15,5 @@ track! Other than that it's another IBM scheme variation. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read icl30 -s drive:0 -o icl30.img` | ||||
|   - `fluxengine read -c icl30 -s drive:0 -o icl30.img` | ||||
|  | ||||
|   | ||||
							
								
								
									
doc/disk-juku.md (new file, 47 lines)
							| @@ -0,0 +1,47 @@ | ||||
| juku | ||||
| ==== | ||||
| ## CP/M | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| Juku E5104 is an Estonian school computer from the late 1980s and | ||||
| early 1990s. It was designed by EKTA in 1985 and, starting in 1988, | ||||
| produced at the "Baltijets" factory in Narva. Arguably the machine | ||||
| was already technically outdated when it was released, but it still | ||||
| occupies a precious spot in the memories of a whole generation of | ||||
| Estonian IT professionals. | ||||
|  | ||||
| The system uses a dual 5.25 inch diskette drive with regular | ||||
| MFM-encoded DSDD media. The disks have | ||||
| a sector skew factor 2 and tracks are written on one side of | ||||
| the floppy until it is full and then continued on the other | ||||
| side, starting from the outside of the disk again. This differs | ||||
| from the most common alternating sides method and somewhat | ||||
| complicates reading CP/M filesystem content with common tools. | ||||
|  | ||||
| Mostly 800kB (786kB) DSDD disks were used, but there are also | ||||
| 400kB (386kB) SSDD floppies in circulation. | ||||
|  | ||||
| ## References (all in Estonian) | ||||
|  | ||||
|   - [How to read/write Juku disk images?](https://j3k.infoaed.ee/kettad/) | ||||
|   - [List of recovered Juku software](https://j3k.infoaed.ee/tarkvara-kataloog/) | ||||
|   - [System disks for E5104](https://elektroonikamuuseum.ee/juku_arvuti_tarkvara.html) | ||||
|  | ||||
| ## Options | ||||
|  | ||||
|   - Format variants: | ||||
|       - `800`: 800kB 80-track 10-sector DSDD | ||||
|       - `400`: 400kB 80-track 10-sector SSDD | ||||
|  | ||||
| ## Examples | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read -c juku --800 -s drive:0 -o image.juk` | ||||
|   - `fluxengine read -c juku --400 -s drive:0 -o image.juk` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write -c juku --800 -d drive:0 -i image.juk` | ||||
|   - `fluxengine write -c juku --400 -d drive:0 -i image.juk` | ||||
|  | ||||
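The track ordering described in the new Juku page above (one side is written from the outermost track inwards until full, then the image continues on the other side, again starting from the outermost track) is the detail that trips up generic CP/M tools. As an illustrative sketch only (not FluxEngine code), assuming the 80-cylinder double-sided 800kB variant, the mapping from logical track to physical location is:

```python
# Illustrative sketch (not FluxEngine code): map a Juku logical track number
# to a physical (cylinder, head) pair for the 800kB DSDD layout, assuming
# 80 cylinders and side 0 being filled completely before side 1.
CYLINDERS = 80


def juku_physical_location(logical_track: int) -> tuple[int, int]:
    if not 0 <= logical_track < 2 * CYLINDERS:
        raise ValueError("logical track out of range")
    head, cylinder = divmod(logical_track, CYLINDERS)
    return cylinder, head


assert juku_physical_location(0) == (0, 0)    # first track: side 0, outermost cylinder
assert juku_physical_location(80) == (0, 1)   # continues on side 1, outermost cylinder again
```

For the single-sided 400kB variant only side 0 is used, so only the first 80 logical tracks apply.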
| @@ -47,13 +47,13 @@ standard for disk images is to omit it. If you want them, specify that you want | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read mac --400 -s drive:0 -o mac.dsk` | ||||
|   - `fluxengine read mac --800 -s drive:0 -o mac.dsk` | ||||
|   - `fluxengine read -c mac --400 -s drive:0 -o mac.dsk` | ||||
|   - `fluxengine read -c mac --800 -s drive:0 -o mac.dsk` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write mac --400 -d drive:0 -i mac.dsk` | ||||
|   - `fluxengine write mac --800 -d drive:0 -i mac.dsk` | ||||
|   - `fluxengine write -c mac --400 -d drive:0 -i mac.dsk` | ||||
|   - `fluxengine write -c mac --800 -d drive:0 -i mac.dsk` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -52,7 +52,7 @@ need to apply extra options to change the format if desired. | ||||
|  | ||||
| ## Options | ||||
|  | ||||
|   - : | ||||
|   - $format: | ||||
|       - `143`: 143kB 5.25" SSDD hard-sectored; Micropolis MetaFloppy Mod I | ||||
|       - `287`: 287kB 5.25" DSDD hard-sectored; Micropolis MetaFloppy Mod I | ||||
|       - `315`: 315kB 5.25" SSDD hard-sectored; Micropolis MetaFloppy Mod II | ||||
| @@ -63,11 +63,11 @@ need to apply extra options to change the format if desired. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read micropolis -s drive:0 -o micropolis.img` | ||||
|   - `fluxengine read -c micropolis -s drive:0 -o micropolis.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write micropolis -d drive:0 -i micropolis.img` | ||||
|   - `fluxengine write -c micropolis -d drive:0 -i micropolis.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -52,10 +52,10 @@ Words are all stored little-endian. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read mx --110 -s drive:0 -o mx.img` | ||||
|   - `fluxengine read mx --220ds -s drive:0 -o mx.img` | ||||
|   - `fluxengine read mx --220ss -s drive:0 -o mx.img` | ||||
|   - `fluxengine read mx --440 -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --110 -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --220ds -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --220ss -s drive:0 -o mx.img` | ||||
|   - `fluxengine read -c mx --440 -s drive:0 -o mx.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -18,9 +18,9 @@ boot ROM could only read single density data.) | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read n88basic -s drive:0 -o n88basic.img` | ||||
|   - `fluxengine read -c n88basic -s drive:0 -o n88basic.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write n88basic -d drive:0 -i n88basic.img` | ||||
|   - `fluxengine write -c n88basic -d drive:0 -i n88basic.img` | ||||
|  | ||||
|   | ||||
| @@ -31,15 +31,15 @@ equivalent to .img images. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read northstar --87 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read northstar --175 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read northstar --350 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read -c northstar --87 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read -c northstar --175 -s drive:0 -o northstar.nsi` | ||||
|   - `fluxengine read -c northstar --350 -s drive:0 -o northstar.nsi` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write northstar --87 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write northstar --175 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write northstar --350 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write -c northstar --87 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write -c northstar --175 -d drive:0 -i northstar.nsi` | ||||
|   - `fluxengine write -c northstar --350 -d drive:0 -i northstar.nsi` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -24,9 +24,9 @@ and, oddly, swapped sides. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read psos -s drive:0 -o pme.img` | ||||
|   - `fluxengine read -c psos -s drive:0 -o pme.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write psos -d drive:0 -i pme.img` | ||||
|   - `fluxengine write -c psos -d drive:0 -i pme.img` | ||||
|  | ||||
|   | ||||
| @@ -40,9 +40,9 @@ for assistance with this! | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read rolandd20 -s drive:0 -o rolandd20.img` | ||||
|   - `fluxengine read -c rolandd20 -s drive:0 -o rolandd20.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write rolandd20 -d drive:0 -i rolandd20.img` | ||||
|   - `fluxengine write -c rolandd20 -d drive:0 -i rolandd20.img` | ||||
|  | ||||
|   | ||||
| @@ -15,9 +15,9 @@ vanilla single-sided IBM scheme variation. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read rx50 -s drive:0 -o rx50.img` | ||||
|   - `fluxengine read -c rx50 -s drive:0 -o rx50.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write rx50 -d drive:0 -i rx50.img` | ||||
|   - `fluxengine write -c rx50 -d drive:0 -i rx50.img` | ||||
|  | ||||
|   | ||||
| @@ -1,15 +0,0 @@ | ||||
| shugart_drive | ||||
| ==== | ||||
| ## Adjust configuration for a Shugart drive | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| This is an extension profile; adding this to the command line will configure | ||||
| FluxEngine to adjust the pinout to work with a Shugart drive. This only works | ||||
| on Greaseweazle hardware. | ||||
|  | ||||
| For example: | ||||
|  | ||||
| ``` | ||||
| fluxengine read ibm --720 shugart_drive | ||||
| ``` | ||||
|  | ||||
| @@ -26,7 +26,7 @@ this is completely correct, so don't trust it! | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read smaky6 -s drive:0 -o smaky6.img` | ||||
|   - `fluxengine read -c smaky6 -s drive:0 -o smaky6.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -34,13 +34,13 @@ FluxEngine supports reading and writing Tartu disks with CP/M filesystem access. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read tartu --390 -s drive:0 -o tartu.img` | ||||
|   - `fluxengine read tartu --780 -s drive:0 -o tartu.img` | ||||
|   - `fluxengine read -c tartu --390 -s drive:0 -o tartu.img` | ||||
|   - `fluxengine read -c tartu --780 -s drive:0 -o tartu.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write tartu --390 -d drive:0 -i tartu.img` | ||||
|   - `fluxengine write tartu --780 -d drive:0 -i tartu.img` | ||||
|   - `fluxengine write -c tartu --390 -d drive:0 -i tartu.img` | ||||
|   - `fluxengine write -c tartu --780 -d drive:0 -i tartu.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
							
								
								
									
doc/disk-ti99.md (new file, 26 lines)
							| @@ -0,0 +1,26 @@ | ||||
| ti99 | ||||
| ==== | ||||
| ## 90kB 35-track SSSD | ||||
| <!-- This file is automatically generated. Do not edit. --> | ||||
|  | ||||
| The TI-99 was a deeply weird microcomputer from 1981, whose main claim to fame | ||||
| was being built around a 16-bit TMS9900 CPU --- and also having only 256 bytes | ||||
| of system RAM, with an additional 16kB of video RAM, requiring the BASIC to | ||||
| store the user's program in video RAM. | ||||
|  | ||||
| It had an optional rack-mount expansion system with an optional disk drive. This | ||||
| was controlled by a standard FD1771 or FD179x chip, meaning a relatively normal | ||||
| IBM-scheme disk format of 35 tracks containing nine 256-byte sectors. | ||||
|  | ||||
| FluxEngine can read these. | ||||
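|  | ||||
| As a rough illustration (a Python sketch making the common assumption | ||||
| that a raw image stores the sectors in ascending track and sector | ||||
| order; it is not a description of FluxEngine internals), the byte | ||||
| offset of a sector within a flat image of this format is: | ||||
|  | ||||
| ``` | ||||
| # Illustration only: 35 tracks of nine 256-byte sectors, stored in order. | ||||
| TRACKS = 35 | ||||
| SECTORS_PER_TRACK = 9 | ||||
| SECTOR_SIZE = 256 | ||||
|  | ||||
| def sector_offset(track: int, sector: int) -> int: | ||||
|     """Byte offset of (track, sector) in a raw image; 0-based indices.""" | ||||
|     assert 0 <= track < TRACKS and 0 <= sector < SECTORS_PER_TRACK | ||||
|     return (track * SECTORS_PER_TRACK + sector) * SECTOR_SIZE | ||||
|  | ||||
| print(sector_offset(1, 0))  # 2304, the first sector of track 1 | ||||
| ``` | ||||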
|  | ||||
| ## Options | ||||
|  | ||||
| (no options) | ||||
|  | ||||
| ## Examples | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read -c ti99 -s drive:0 -o ti99.img` | ||||
|  | ||||
| @@ -26,11 +26,11 @@ FluxEngine will read and write these (but only the DSDD MFM variant). | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read tids990 -s drive:0 -o tids990.img` | ||||
|   - `fluxengine read -c tids990 -s drive:0 -o tids990.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write tids990 -d drive:0 -i tids990.img` | ||||
|   - `fluxengine write -c tids990 -d drive:0 -i tids990.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -20,8 +20,8 @@ on the precise format. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read tiki --90 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read tiki --200 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read tiki --400 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read tiki --800 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --90 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --200 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --400 -s drive:0 -o tiki.img` | ||||
|   - `fluxengine read -c tiki --800 -s drive:0 -o tiki.img` | ||||
|  | ||||
|   | ||||
| @@ -46,13 +46,13 @@ FluxEngine can read and write both the single-sided and double-sided variants. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read victor9k --612 -s drive:0 -o victor9k.img` | ||||
|   - `fluxengine read victor9k --1224 -s drive:0 -o victor9k.img` | ||||
|   - `fluxengine read -c victor9k --612 -s drive:0 -o victor9k.img` | ||||
|   - `fluxengine read -c victor9k --1224 -s drive:0 -o victor9k.img` | ||||
|  | ||||
| To write: | ||||
|  | ||||
|   - `fluxengine write victor9k --612 -d drive:0 -i victor9k.img` | ||||
|   - `fluxengine write victor9k --1224 -d drive:0 -i victor9k.img` | ||||
|   - `fluxengine write -c victor9k --612 -d drive:0 -i victor9k.img` | ||||
|   - `fluxengine write -c victor9k --1224 -d drive:0 -i victor9k.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -31,7 +31,7 @@ system. | ||||
|  | ||||
| To read: | ||||
|  | ||||
|   - `fluxengine read zilogmcz -s drive:0 -o zilogmcz.img` | ||||
|   - `fluxengine read -c zilogmcz -s drive:0 -o zilogmcz.img` | ||||
|  | ||||
| ## References | ||||
|  | ||||
|   | ||||
| @@ -36,10 +36,10 @@ Forty track formats on a forty track drive | ||||
| ------------------------------------------ | ||||
|  | ||||
| If you actually have a forty track drive, you need to tell FluxEngine. This is | ||||
| done by adding the special profile `40track_drive`: | ||||
| done by adding `--drivetype=40`: | ||||
|  | ||||
| ``` | ||||
| fluxengine write ibm --360 40track_drive -i image.img -d drive:0 | ||||
| fluxengine write -c ibm --360 --drivetype=40 -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| It should then Just Work. This is supported by both FluxEngine and Greaseweazle | ||||
| @@ -47,24 +47,6 @@ hardware. | ||||
|  | ||||
| Obviously you can't write an eighty-track format using a forty-track drive! | ||||
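|  | ||||
| A hedged sketch of the underlying idea (an illustration of the usual | ||||
| double-stepping convention, not FluxEngine's actual layout code): an | ||||
| eighty-track drive reaches the cylinders of a forty-track format by | ||||
| stepping twice per cylinder, while a real forty-track drive maps them | ||||
| one to one, which is why FluxEngine needs to be told what kind of | ||||
| drive it has. | ||||
|  | ||||
| ``` | ||||
| # Illustration only: physical cylinder for a logical cylinder of a | ||||
| # 40-track format, depending on the drive being used. | ||||
| def physical_cylinder(logical_cylinder: int, drive_tracks: int) -> int: | ||||
|     if drive_tracks == 40: | ||||
|         return logical_cylinder       # 40-track drive: direct mapping | ||||
|     if drive_tracks == 80: | ||||
|         return logical_cylinder * 2   # 80-track drive: double-step | ||||
|     raise ValueError("unsupported drive type") | ||||
| ``` | ||||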
|  | ||||
| Apple II drives | ||||
| --------------- | ||||
|  | ||||
| The Apple II had special drives which supported microstepping: when commanded | ||||
| to move the head, then instead of moving in single-track steps as is done in | ||||
| most other drives, the Apple II drive would move in quarter-track steps. This | ||||
| allowed much less precise head alignment, as small errors could be corrected in | ||||
| software. (The Brother word processor drives were similar.) The bus interface | ||||
| is different from normal PC drives. | ||||
|  | ||||
| The FluxEngine client supports these with the `apple2_drive` profile: | ||||
|  | ||||
| ``` | ||||
| fluxengine write apple2 apple2_drive -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| This is supported only by Greaseweazle hardware. | ||||
|  | ||||
| Shugart drives | ||||
| -------------- | ||||
|  | ||||
| @@ -77,14 +59,32 @@ the drives must be jumpered to configure them. This was mostly used by older | ||||
| 3.5" drives, such as those on the Atari ST. [the How It Works | ||||
| page](technical.md) for the pinout. | ||||
|  | ||||
| The FluxEngine client supports these with the `shugart_drive` profile: | ||||
| The FluxEngine client supports these with `--bus=shugart`: | ||||
|  | ||||
| ``` | ||||
| fluxengine write atarist720 shugart_drive -i image.img -d drive:0 | ||||
| fluxengine write -c atarist720 --bus=shugart -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| (If you have a 40-track Shugart drive, use _both_ `shugart_drive` and | ||||
| `40track_drive`.) | ||||
| (If you have a 40-track Shugart drive, use _both_ `--bus=shugart` and | ||||
| `--drivetype=40`.) | ||||
|  | ||||
| This is supported only by Greaseweazle hardware. | ||||
|  | ||||
| Apple II drives | ||||
| --------------- | ||||
|  | ||||
| The Apple II had special drives which supported microstepping: when commanded | ||||
| to move the head, then instead of moving in single-track steps as is done in | ||||
| most other drives, the Apple II drive would move in quarter-track steps. This | ||||
| allowed much less precise head alignment, as small errors could be corrected in | ||||
| software. (The Brother word processor drives were similar.) The bus interface | ||||
| is different from normal PC drives. | ||||
|  | ||||
| The FluxEngine client supports these with `--drivetype=160 --bus=appleii`. | ||||
|  | ||||
| ``` | ||||
| fluxengine write -c apple2 --drivetype=160 --bus=appleii -i image.img -d drive:0 | ||||
| ``` | ||||
|  | ||||
| This is supported only by Greaseweazle hardware. | ||||
|  | ||||
|   | ||||
							
								
								
									
doc/using.md (53 changed lines)
							| @@ -15,7 +15,7 @@ If possible, try using the GUI, which should provide simplified access for most | ||||
| common operations. | ||||
|  | ||||
| <div style="text-align: center"> | ||||
| <a href="doc/screenshot-details.png"><img src="doc/screenshot-details.png" style="width:60%" alt="screenshot of the GUI in action"></a> | ||||
| <a href="screenshot-details.png"><img src="screenshot-details.png" style="width:60%" alt="screenshot of the GUI in action"></a> | ||||
| </div> | ||||
|  | ||||
| ### Core concepts | ||||
| @@ -82,16 +82,16 @@ Here are some sample invocations: | ||||
| ``` | ||||
| # Read a PC 1440kB disk, producing a disk image with the default name | ||||
| # (ibm.img) | ||||
| $ fluxengine read ibm --1440 | ||||
| $ fluxengine read -c ibm --1440 | ||||
|  | ||||
| # Write a PC 1440kB disk to drive 1 | ||||
| $ fluxengine write ibm --1440 -i image.img -d drive:1 | ||||
| $ fluxengine write -c ibm --1440 -i image.img -d drive:1 | ||||
|  | ||||
| # Read an Eco1 CP/M disk, making a copy of the flux into a file | ||||
| $ fluxengine read eco1 --copy-flux-to copy.flux -o eco1.ldbs | ||||
| $ fluxengine read -c eco1 --copy-flux-to copy.flux -o eco1.ldbs | ||||
|  | ||||
| # Rerun the decode from the flux file, tweaking the parameters | ||||
| $ fluxengine read eco1 -s copy.flux -o eco1.ldbs --cylinders=1 | ||||
| $ fluxengine read -c eco1 -s copy.flux -o eco1.ldbs --cylinders=1 | ||||
| ``` | ||||
|  | ||||
| ### Configuration | ||||
| @@ -108,13 +108,13 @@ encoder { | ||||
|     } | ||||
|   } | ||||
| } | ||||
| $ fluxengine write ibm --1440 config.textpb -i image.img | ||||
| $ fluxengine write -c ibm --1440 -c config.textpb -i image.img | ||||
| ``` | ||||
|  | ||||
| ...or you can specify them on the command line: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine write ibm --1440 -i image.img --encoder.ibm.trackdata.emit_iam=false | ||||
| $ fluxengine write -c ibm --1440 -i image.img --encoder.ibm.trackdata.emit_iam=false | ||||
| ``` | ||||
|  | ||||
| Both the above invocations are equivalent. The text files use [Google's | ||||
| @@ -128,7 +128,7 @@ files as you wish; they are all merged left to right.  You can see all these | ||||
| settings by doing: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine write ibm --1440 --config | ||||
| $ fluxengine write -c ibm --1440 --show-config | ||||
| ``` | ||||
|  | ||||
| The `--show-config` option will cause the current configuration to be dumped to the | ||||
| @@ -146,40 +146,26 @@ different task. Run each one with `--help` to get a full list of | ||||
| (non-configuration-setting) options; this describes only basic usage of the | ||||
| more common tools. | ||||
|  | ||||
|   - `fluxengine read <profile> <options> -s <flux source> -o <image output>` | ||||
|   - `fluxengine read -c <profile> <options> -s <flux source> -o <image output>` | ||||
|  | ||||
|     Reads flux (possibly from a disk) and decodes it into a file system image. | ||||
|     `<profile>` is a reference to an internal input configuration file | ||||
|     describing the format. `<options>` may be any combination of options | ||||
|     defined by the profile. | ||||
|  | ||||
|   - `fluxengine write <profile> -i <image input> -d <flux destination>` | ||||
|   - `fluxengine write -c <profile> -i <image input> -d <flux destination>` | ||||
|  | ||||
|     Reads a filesystem image and encodes it into flux (possibly writing to a | ||||
|     disk). `<profile>` is a reference to an internal output configuration file | ||||
|     describing the format. | ||||
|  | ||||
|   - `fluxengine rawread -s <flux source> -d <flux destination>` | ||||
|  | ||||
|     Reads flux (possibly from a disk) and writes it to a flux file without doing | ||||
|     any decoding. You can specify a profile if you want to read a subset of the | ||||
|     disk. | ||||
|  | ||||
|   - `fluxengine rawwrite -s <flux source> -d <flux destination>` | ||||
|  | ||||
|     Reads flux from a file and writes it (possibly to a disk) without doing any | ||||
|     encoding. You can specify a profile if you want to write a subset of the | ||||
|     disk. | ||||
|  | ||||
|   - `fluxengine merge -s <fluxfile> -s <fluxfile...> -d <fluxfile>` | ||||
|  | ||||
|     Merges data from multiple flux files together. This is useful if you have | ||||
|     several reads from an unreliable disk where each read has a different set | ||||
|     of good sectors. By merging the flux files, you get to combine all the | ||||
|     data. Don't use this on reads of different disks, for obvious reasons! Note | ||||
|     that this works on flux files, not on flux sources. | ||||
|  | ||||
|   - `fluxengine inspect -s <flux source> -c <cylinder> -h <head> -B` | ||||
|   - `fluxengine inspect -s <flux source> -t <track> -h <head> -B` | ||||
|  | ||||
|     Reads flux (possibly from a disk) and does various analyses of it to try and | ||||
|     detect the clock rate, display raw flux information, examine the underlying | ||||
| @@ -198,14 +184,8 @@ more common tools. | ||||
|  | ||||
| There are other tools; try `fluxengine --help`. | ||||
|  | ||||
| **Important note on `rawread` and `rawwrite`:** You can't use these tools to | ||||
| **Important note on `rawwrite`:** You can't use this tool to | ||||
| copy disks, in most circumstances. See [the FAQ](faq.md) for more information. | ||||
| Also, `rawread` is not guaranteed to read correctly. Floppy disks are | ||||
| fundamentally unreliable, and random bit errors may occur at any time; these | ||||
| can only be detected by performing a decode and verifying the checksums on the | ||||
| sectors. To perform a correct read, it's recommended to do `fluxengine read` | ||||
| with the `--copy-flux-to` option, to perform a decode to a filesystem image | ||||
| while also writing to a flux file. | ||||
|  | ||||
| ### Flux sources and destinations | ||||
|  | ||||
| @@ -405,9 +385,8 @@ disks, and have different magnetic properties. 3.5" drives can usually | ||||
| autodetect what kind of medium is inserted into the drive based on the hole in | ||||
| the disk casing, but 5.25" drives can't. As a result, you need to explicitly | ||||
| tell FluxEngine on the command line whether you're using a high density disk or | ||||
| not with the `--drive.high_density` configuration setting. | ||||
| **If you don't do this, your disks may not read correctly and will _certainly_ | ||||
| fail to write correctly.** | ||||
| not with the `--hd` configuration setting.  **If you don't do this, your disks | ||||
| may not read correctly and will _certainly_ fail to write correctly.** | ||||
|  | ||||
| You can distinguish high density 5.25" floppies by the presence of a | ||||
| traction ring around the hole in the middle of the disk; if the ring is not | ||||
| @@ -489,7 +468,7 @@ containing valuable historical data, and you want to read them. | ||||
| Typically I do this: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine read brother240 -s drive:0 -o brother.img --copy-flux-to=brother.flux --decoder.write_csv_to=brother.csv | ||||
| $ fluxengine read -c brother240 -s drive:0 -o brother.img --copy-flux-to=brother.flux --decoder.write_csv_to=brother.csv | ||||
| ``` | ||||
|  | ||||
| This will read the disk in drive 0 and write out an information CSV file. It'll | ||||
| @@ -499,7 +478,7 @@ settings, I can rerun the decode without having to physically touch the disk | ||||
| like this: | ||||
|  | ||||
| ``` | ||||
| $ fluxengine read brother -s brother.flux -o brother.img --decoder.write_csv_to=brother.csv | ||||
| $ fluxengine read -c brother -s brother.flux -o brother.img --decoder.write_csv_to=brother.csv | ||||
| ``` | ||||
|  | ||||
| Apart from being drastically faster, this avoids touching the (potentially | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from build.c import clibrary | ||||
| from build.zip import zip | ||||
| from glob import glob | ||||
| from os.path import * | ||||
| import config | ||||
|  | ||||
| icons = ["fluxfile", "hardware", "icon", "imagefile"] | ||||
|  | ||||
| @@ -17,37 +18,37 @@ clibrary( | ||||
|     }, | ||||
| ) | ||||
|  | ||||
| simplerule( | ||||
|     name="fluxengine_icns", | ||||
|     ins=["./icon.png"], | ||||
|     outs=["=fluxengine.icns"], | ||||
|     commands=[ | ||||
|         "mkdir -p fluxengine.iconset", | ||||
|         "sips -z 64 64 $[ins[0]] --out fluxengine.iconset/icon_32x32@2x.png > /dev/null", | ||||
|         "iconutil -c icns -o $[outs[0]] fluxengine.iconset", | ||||
|     ], | ||||
|     label="ICONSET", | ||||
| ) | ||||
|  | ||||
| simplerule( | ||||
|     name="fluxengine_ico", | ||||
|     ins=["./icon.png"], | ||||
|     outs=["=fluxengine.ico"], | ||||
|     commands=["png2ico $[outs[0]] $[ins[0]]"], | ||||
|     label="MAKEICON", | ||||
| ) | ||||
|  | ||||
| template_files = [ | ||||
|     f | ||||
|     for f in glob( | ||||
|         "**", recursive=True, root_dir="extras/FluxEngine.app.template" | ||||
| if config.osx: | ||||
|     simplerule( | ||||
|         name="fluxengine_icns", | ||||
|         ins=["./icon.png"], | ||||
|         outs=["=fluxengine.icns"], | ||||
|         commands=[ | ||||
|             "mkdir -p fluxengine.iconset", | ||||
|             "sips -z 64 64 $[ins[0]] --out fluxengine.iconset/icon_32x32@2x.png > /dev/null", | ||||
|             "iconutil -c icns -o $[outs[0]] fluxengine.iconset", | ||||
|         ], | ||||
|         label="ICONSET", | ||||
|     ) | ||||
|  | ||||
|     template_files = [ | ||||
|         f | ||||
|         for f in glob("**", recursive=True, root_dir="extras/FluxEngine.app.template") | ||||
|         if isfile(join("extras/FluxEngine.app.template", f)) | ||||
|     ] | ||||
|     zip( | ||||
|         name="fluxengine_template", | ||||
|         items={ | ||||
|             join("FluxEngine.app", k): join("extras/FluxEngine.app.template", k) | ||||
|             for k in template_files | ||||
|         }, | ||||
|     ) | ||||
|  | ||||
| if config.windows: | ||||
|     simplerule( | ||||
|         name="fluxengine_ico", | ||||
|         ins=["./icon.png"], | ||||
|         outs=["=fluxengine.ico"], | ||||
|         commands=["png2ico $[outs[0]] $[ins[0]]"], | ||||
|         label="MAKEICON", | ||||
|     ) | ||||
|     if isfile(join("extras/FluxEngine.app.template", f)) | ||||
| ] | ||||
| zip( | ||||
|     name="fluxengine_template", | ||||
|     items={ | ||||
|         join("FluxEngine.app", k): join("extras/FluxEngine.app.template", k) | ||||
|         for k in template_files | ||||
|     }, | ||||
| ) | ||||
|   | ||||
| @@ -84,16 +84,12 @@ void renderLogMessage( | ||||
| void renderLogMessage( | ||||
|     LogRenderer& r, std::shared_ptr<const TrackReadLogMessage> m) | ||||
| { | ||||
|     const auto& track = *m->track; | ||||
|  | ||||
|     std::set<std::shared_ptr<const Sector>> rawSectors; | ||||
|     std::set<std::shared_ptr<const Record>> rawRecords; | ||||
|     for (const auto& trackDataFlux : track.trackDatas) | ||||
|     for (const auto& track : m->tracks) | ||||
|     { | ||||
|         rawSectors.insert( | ||||
|             trackDataFlux->sectors.begin(), trackDataFlux->sectors.end()); | ||||
|         rawRecords.insert( | ||||
|             trackDataFlux->records.begin(), trackDataFlux->records.end()); | ||||
|         rawSectors.insert(track->allSectors.begin(), track->allSectors.end()); | ||||
|         rawRecords.insert(track->records.begin(), track->records.end()); | ||||
|     } | ||||
|  | ||||
|     nanoseconds_t clock = 0; | ||||
| @@ -114,22 +110,22 @@ void renderLogMessage( | ||||
|     r.newline().add("sectors:"); | ||||
|  | ||||
|     std::vector<std::shared_ptr<const Sector>> sectors( | ||||
|         track.sectors.begin(), track.sectors.end()); | ||||
|         m->sectors.begin(), m->sectors.end()); | ||||
|     std::sort(sectors.begin(), sectors.end(), sectorPointerSortPredicate); | ||||
|  | ||||
|     for (const auto& sector : sectors) | ||||
|     for (const auto& sector : rawSectors) | ||||
|         r.add(fmt::format("{}.{}.{}{}", | ||||
|             sector->logicalTrack, | ||||
|             sector->logicalSide, | ||||
|             sector->logicalCylinder, | ||||
|             sector->logicalHead, | ||||
|             sector->logicalSector, | ||||
|             Sector::statusToChar(sector->status))); | ||||
|  | ||||
|     int size = 0; | ||||
|     std::set<std::pair<int, int>> track_ids; | ||||
|     for (const auto& sector : m->track->sectors) | ||||
|     for (const auto& sector : m->sectors) | ||||
|     { | ||||
|         track_ids.insert( | ||||
|             std::make_pair(sector->logicalTrack, sector->logicalSide)); | ||||
|             std::make_pair(sector->logicalCylinder, sector->logicalHead)); | ||||
|         size += sector->data.size(); | ||||
|     } | ||||
|  | ||||
| @@ -184,12 +180,16 @@ private: | ||||
|         _cache; | ||||
| }; | ||||
|  | ||||
| void measureDiskRotation() | ||||
| static nanoseconds_t getRotationalPeriodFromConfig() | ||||
| { | ||||
|     return globalConfig()->drive().rotational_period_ms() * 1e6; | ||||
| } | ||||
|  | ||||
| static nanoseconds_t measureDiskRotation() | ||||
| { | ||||
|     log(BeginSpeedOperationLogMessage()); | ||||
|  | ||||
|     nanoseconds_t oneRevolution = | ||||
|         globalConfig()->drive().rotational_period_ms() * 1e6; | ||||
|     nanoseconds_t oneRevolution = getRotationalPeriodFromConfig(); | ||||
|     if (oneRevolution == 0) | ||||
|     { | ||||
|         usbSetDrive(globalConfig()->drive().drive(), | ||||
| @@ -224,22 +224,24 @@ void measureDiskRotation() | ||||
|         error("Failed\nIs a disk in the drive?"); | ||||
|  | ||||
|     log(EndSpeedOperationLogMessage{oneRevolution}); | ||||
|     return oneRevolution; | ||||
| } | ||||
|  | ||||
| /* Given a set of sectors, deduplicates them sensibly (e.g. if there is a good | ||||
|  * and bad version of the same sector, the bad version is dropped). */ | ||||
|  | ||||
| static std::set<std::shared_ptr<const Sector>> collectSectors( | ||||
|     std::set<std::shared_ptr<const Sector>>& track_sectors, | ||||
| static std::vector<std::shared_ptr<const Sector>> collectSectors( | ||||
|     std::vector<std::shared_ptr<const Sector>>& trackSectors, | ||||
|     bool collapse_conflicts = true) | ||||
| { | ||||
|     typedef std::tuple<unsigned, unsigned, unsigned> key_t; | ||||
|     std::multimap<key_t, std::shared_ptr<const Sector>> sectors; | ||||
|  | ||||
|     for (const auto& sector : track_sectors) | ||||
|     for (const auto& sector : trackSectors) | ||||
|     { | ||||
|         key_t sectorid = { | ||||
|             sector->logicalTrack, sector->logicalSide, sector->logicalSector}; | ||||
|         key_t sectorid = {sector->logicalCylinder, | ||||
|             sector->logicalHead, | ||||
|             sector->logicalSector}; | ||||
|         sectors.insert({sectorid, sector}); | ||||
|     } | ||||
|  | ||||
| @@ -281,42 +283,52 @@ static std::set<std::shared_ptr<const Sector>> collectSectors( | ||||
|         sector_set.insert(new_sector); | ||||
|         it = ub; | ||||
|     } | ||||
|     return sector_set; | ||||
|     return sector_set | std::ranges::to<std::vector>(); | ||||
| } | ||||
|  | ||||
| BadSectorsState combineRecordAndSectors(TrackFlux& trackFlux, | ||||
|     Decoder& decoder, | ||||
|     std::shared_ptr<const TrackInfo>& trackLayout) | ||||
| struct CombinationResult | ||||
| { | ||||
|     std::set<std::shared_ptr<const Sector>> track_sectors; | ||||
|     BadSectorsState result; | ||||
|     std::vector<std::shared_ptr<const Sector>> sectors; | ||||
| }; | ||||
|  | ||||
| static CombinationResult combineRecordAndSectors( | ||||
|     std::vector<std::shared_ptr<const Track>>& tracks, | ||||
|     Decoder& decoder, | ||||
|     const std::shared_ptr<const LogicalTrackLayout>& ltl) | ||||
| { | ||||
|     CombinationResult cr = {HAS_NO_BAD_SECTORS}; | ||||
|     std::vector<std::shared_ptr<const Sector>> track_sectors; | ||||
|  | ||||
|     /* Add the sectors which were there. */ | ||||
|  | ||||
|     for (auto& trackdataflux : trackFlux.trackDatas) | ||||
|         track_sectors.insert( | ||||
|             trackdataflux->sectors.begin(), trackdataflux->sectors.end()); | ||||
|     for (auto& track : tracks) | ||||
|         for (auto& sector : track->allSectors) | ||||
|             track_sectors.push_back(sector); | ||||
|  | ||||
|     /* Add the sectors which should be there. */ | ||||
|  | ||||
|     for (unsigned sectorId : trackLayout->naturalSectorOrder) | ||||
|     for (unsigned sectorId : ltl->diskSectorOrder) | ||||
|     { | ||||
|         auto sector = std::make_shared<Sector>(LogicalLocation{ | ||||
|             trackLayout->logicalTrack, trackLayout->logicalSide, sectorId}); | ||||
|         auto sector = std::make_shared<Sector>( | ||||
|             LogicalLocation{ltl->logicalCylinder, ltl->logicalHead, sectorId}); | ||||
|  | ||||
|         sector->status = Sector::MISSING; | ||||
|         track_sectors.insert(sector); | ||||
|         sector->physicalLocation = std::make_optional( | ||||
|             CylinderHead(ltl->physicalCylinder, ltl->physicalHead)); | ||||
|         track_sectors.push_back(sector); | ||||
|     } | ||||
|  | ||||
|     /* Deduplicate. */ | ||||
|  | ||||
|     trackFlux.sectors = collectSectors(track_sectors); | ||||
|     if (trackFlux.sectors.empty()) | ||||
|         return HAS_BAD_SECTORS; | ||||
|     for (const auto& sector : trackFlux.sectors) | ||||
|     cr.sectors = collectSectors(track_sectors); | ||||
|     if (cr.sectors.empty()) | ||||
|         cr.result = HAS_BAD_SECTORS; | ||||
|     for (const auto& sector : cr.sectors) | ||||
|         if (sector->status != Sector::OK) | ||||
|             return HAS_BAD_SECTORS; | ||||
|             cr.result = HAS_BAD_SECTORS; | ||||
|  | ||||
|     return HAS_NO_BAD_SECTORS; | ||||
|     return cr; | ||||
| } | ||||
|  | ||||
| static void adjustTrackOnError(FluxSource& fluxSource, int baseTrack) | ||||
| @@ -339,179 +351,226 @@ static void adjustTrackOnError(FluxSource& fluxSource, int baseTrack) | ||||
|     } | ||||
| } | ||||
|  | ||||
| ReadResult readGroup(FluxSourceIteratorHolder& fluxSourceIteratorHolder, | ||||
|     std::shared_ptr<const TrackInfo>& trackInfo, | ||||
|     TrackFlux& trackFlux, | ||||
| struct ReadGroupResult | ||||
| { | ||||
|     ReadResult result; | ||||
|     std::vector<std::shared_ptr<const Sector>> combinedSectors; | ||||
| }; | ||||
|  | ||||
| static ReadGroupResult readGroup(const DiskLayout& diskLayout, | ||||
|     FluxSourceIteratorHolder& fluxSourceIteratorHolder, | ||||
|     const std::shared_ptr<const LogicalTrackLayout>& ltl, | ||||
|     std::vector<std::shared_ptr<const Track>>& tracks, | ||||
|     Decoder& decoder) | ||||
| { | ||||
|     ReadResult result = BAD_AND_CAN_NOT_RETRY; | ||||
|     ReadGroupResult rgr = {BAD_AND_CAN_NOT_RETRY}; | ||||
|  | ||||
|     /* Before doing the read, look to see if we already have the necessary | ||||
|      * sectors. */ | ||||
|  | ||||
|     for (unsigned offset = 0; offset < trackInfo->groupSize; | ||||
|         offset += Layout::getHeadWidth()) | ||||
|     { | ||||
|         log(BeginReadOperationLogMessage{ | ||||
|             trackInfo->physicalTrack + offset, trackInfo->physicalSide}); | ||||
|         auto [result, sectors] = combineRecordAndSectors(tracks, decoder, ltl); | ||||
|         rgr.combinedSectors = sectors; | ||||
|         if (result == HAS_NO_BAD_SECTORS) | ||||
|         { | ||||
|             /* We have all necessary sectors, so can stop here. */ | ||||
|             rgr.result = GOOD_READ; | ||||
|             if (globalConfig()->decoder().skip_unnecessary_tracks()) | ||||
|                 return rgr; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     for (unsigned offset = 0; offset < ltl->groupSize; | ||||
|         offset += diskLayout.headWidth) | ||||
|     { | ||||
|         unsigned physicalCylinder = ltl->physicalCylinder + offset; | ||||
|         unsigned physicalHead = ltl->physicalHead; | ||||
|         auto& ptl = diskLayout.layoutByPhysicalLocation.at( | ||||
|             {physicalCylinder, physicalHead}); | ||||
|  | ||||
|         /* Do the physical read. */ | ||||
|  | ||||
|         log(BeginReadOperationLogMessage{physicalCylinder, physicalHead}); | ||||
|  | ||||
|         auto& fluxSourceIterator = fluxSourceIteratorHolder.getIterator( | ||||
|             trackInfo->physicalTrack + offset, trackInfo->physicalSide); | ||||
|             physicalCylinder, physicalHead); | ||||
|         if (!fluxSourceIterator.hasNext()) | ||||
|             continue; | ||||
|  | ||||
|         std::shared_ptr<const Fluxmap> fluxmap = fluxSourceIterator.next(); | ||||
|         // ->rescale( | ||||
|         //     1.0 / globalConfig()->flux_source().rescale()); | ||||
|         auto fluxmap = fluxSourceIterator.next(); | ||||
|         log(EndReadOperationLogMessage()); | ||||
|         log("{0} ms in {1} bytes", | ||||
|             (int)(fluxmap->duration() / 1e6), | ||||
|             fluxmap->bytes()); | ||||
|  | ||||
|         auto trackdataflux = decoder.decodeToSectors(fluxmap, trackInfo); | ||||
|         trackFlux.trackDatas.push_back(trackdataflux); | ||||
|         if (combineRecordAndSectors(trackFlux, decoder, trackInfo) == | ||||
|             HAS_NO_BAD_SECTORS) | ||||
|         auto flux = decoder.decodeToSectors(std::move(fluxmap), ptl); | ||||
|         flux->normalisedSectors = collectSectors(flux->allSectors); | ||||
|         tracks.push_back(flux); | ||||
|  | ||||
|         /* Decode what we've got so far. */ | ||||
|  | ||||
|         auto [result, sectors] = combineRecordAndSectors(tracks, decoder, ltl); | ||||
|         rgr.combinedSectors = sectors; | ||||
|         if (result == HAS_NO_BAD_SECTORS) | ||||
|         { | ||||
|             result = GOOD_READ; | ||||
|             /* We have all necessary sectors, so can stop here. */ | ||||
|             rgr.result = GOOD_READ; | ||||
|             if (globalConfig()->decoder().skip_unnecessary_tracks()) | ||||
|                 return result; | ||||
|                 break; | ||||
|         } | ||||
|         else if (fluxSourceIterator.hasNext()) | ||||
|             result = BAD_AND_CAN_RETRY; | ||||
|         { | ||||
|             /* The flux source claims it can do more reads, so mark this | ||||
|              * group as being retryable. */ | ||||
|             rgr.result = BAD_AND_CAN_RETRY; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     return result; | ||||
|     return rgr; | ||||
| } | ||||
|  | ||||
| void writeTracks(FluxSink& fluxSink, | ||||
| void writeTracks(const DiskLayout& diskLayout, | ||||
|  | ||||
|     FluxSinkFactory& fluxSinkFactory, | ||||
|     std::function<std::unique_ptr<const Fluxmap>( | ||||
|         std::shared_ptr<const TrackInfo>& trackInfo)> producer, | ||||
|     std::function<bool(std::shared_ptr<const TrackInfo>& trackInfo)> verifier, | ||||
|     std::vector<std::shared_ptr<const TrackInfo>>& trackInfos) | ||||
|         const std::shared_ptr<const LogicalTrackLayout>&)> producer, | ||||
|     std::function<bool(const std::shared_ptr<const LogicalTrackLayout>&)> | ||||
|         verifier, | ||||
|     const std::vector<CylinderHead>& logicalLocations) | ||||
| { | ||||
|     log(BeginOperationLogMessage{"Encoding and writing to disk"}); | ||||
|  | ||||
|     if (fluxSink.isHardware()) | ||||
|     if (fluxSinkFactory.isHardware()) | ||||
|         measureDiskRotation(); | ||||
|     int index = 0; | ||||
|     for (auto& trackInfo : trackInfos) | ||||
|     { | ||||
|         log(OperationProgressLogMessage{ | ||||
|             index * 100 / (unsigned)trackInfos.size()}); | ||||
|         index++; | ||||
|  | ||||
|         testForEmergencyStop(); | ||||
|  | ||||
|         int retriesRemaining = globalConfig()->decoder().retries(); | ||||
|         for (;;) | ||||
|         auto fluxSink = fluxSinkFactory.create(); | ||||
|         int index = 0; | ||||
|         for (auto& ch : logicalLocations) | ||||
|         { | ||||
|             for (int offset = 0; offset < trackInfo->groupSize; | ||||
|                 offset += Layout::getHeadWidth()) | ||||
|             log(OperationProgressLogMessage{ | ||||
|                 index * 100 / (unsigned)logicalLocations.size()}); | ||||
|             index++; | ||||
|  | ||||
|             testForEmergencyStop(); | ||||
|  | ||||
|             const auto& ltl = diskLayout.layoutByLogicalLocation.at(ch); | ||||
|             int retriesRemaining = globalConfig()->decoder().retries(); | ||||
|             for (;;) | ||||
|             { | ||||
|                 unsigned physicalTrack = trackInfo->physicalTrack + offset; | ||||
|  | ||||
|                 log(BeginWriteOperationLogMessage{ | ||||
|                     physicalTrack, trackInfo->physicalSide}); | ||||
|  | ||||
|                 if (offset == globalConfig()->drive().group_offset()) | ||||
|                 for (int offset = 0; offset < ltl->groupSize; | ||||
|                     offset += diskLayout.headWidth) | ||||
|                 { | ||||
|                     auto fluxmap = producer(trackInfo); | ||||
|                     if (!fluxmap) | ||||
|                         goto erase; | ||||
|                     unsigned physicalCylinder = ltl->physicalCylinder + offset; | ||||
|                     unsigned physicalHead = ltl->physicalHead; | ||||
|  | ||||
|                     fluxSink.writeFlux( | ||||
|                         physicalTrack, trackInfo->physicalSide, *fluxmap); | ||||
|                     log("writing {0} ms in {1} bytes", | ||||
|                         int(fluxmap->duration() / 1e6), | ||||
|                         fluxmap->bytes()); | ||||
|                 } | ||||
|                 else | ||||
|                 { | ||||
|                 erase: | ||||
|                     /* Erase this track rather than writing. */ | ||||
|                     log(BeginWriteOperationLogMessage{ | ||||
|                         physicalCylinder, ltl->physicalHead}); | ||||
|  | ||||
|                     Fluxmap blank; | ||||
|                     fluxSink.writeFlux( | ||||
|                         physicalTrack, trackInfo->physicalSide, blank); | ||||
|                     log("erased"); | ||||
|                     if (offset == globalConfig()->drive().group_offset()) | ||||
|                     { | ||||
|                         auto fluxmap = producer(ltl); | ||||
|                         if (!fluxmap) | ||||
|                             goto erase; | ||||
|  | ||||
|                         fluxSink->addFlux( | ||||
|                             physicalCylinder, physicalHead, *fluxmap); | ||||
|                         log("writing {0} ms in {1} bytes", | ||||
|                             int(fluxmap->duration() / 1e6), | ||||
|                             fluxmap->bytes()); | ||||
|                     } | ||||
|                     else | ||||
|                     { | ||||
|                     erase: | ||||
|                         /* Erase this track rather than writing. */ | ||||
|  | ||||
|                         Fluxmap blank; | ||||
|                         fluxSink->addFlux( | ||||
|                             physicalCylinder, physicalHead, blank); | ||||
|                         log("erased"); | ||||
|                     } | ||||
|  | ||||
|                     log(EndWriteOperationLogMessage()); | ||||
|                 } | ||||
|  | ||||
|                 log(EndWriteOperationLogMessage()); | ||||
|                 if (verifier(ltl)) | ||||
|                     break; | ||||
|  | ||||
|                 if (retriesRemaining == 0) | ||||
|                     error("fatal error on write"); | ||||
|  | ||||
|                 log("retrying; {} retries remaining", retriesRemaining); | ||||
|                 retriesRemaining--; | ||||
|             } | ||||
|  | ||||
|             if (verifier(trackInfo)) | ||||
|                 break; | ||||
|  | ||||
|             if (retriesRemaining == 0) | ||||
|                 error("fatal error on write"); | ||||
|  | ||||
|             log("retrying; {} retries remaining", retriesRemaining); | ||||
|             retriesRemaining--; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     log(EndOperationLogMessage{"Write complete"}); | ||||
| } | ||||
|  | ||||
| void writeTracks(FluxSink& fluxSink, | ||||
| void writeTracks(const DiskLayout& diskLayout, | ||||
|     FluxSinkFactory& fluxSinkFactory, | ||||
|     Encoder& encoder, | ||||
|     const Image& image, | ||||
|     std::vector<std::shared_ptr<const TrackInfo>>& trackInfos) | ||||
|     const std::vector<CylinderHead>& chs) | ||||
| { | ||||
|     writeTracks( | ||||
|         fluxSink, | ||||
|         [&](std::shared_ptr<const TrackInfo>& trackInfo) | ||||
|         diskLayout, | ||||
|         fluxSinkFactory, | ||||
|         [&](const std::shared_ptr<const LogicalTrackLayout>& ltl) | ||||
|         { | ||||
|             auto sectors = encoder.collectSectors(trackInfo, image); | ||||
|             return encoder.encode(trackInfo, sectors, image); | ||||
|             auto sectors = encoder.collectSectors(*ltl, image); | ||||
|             return encoder.encode(*ltl, sectors, image); | ||||
|         }, | ||||
|         [](const auto&) | ||||
|         { | ||||
|             return true; | ||||
|         }, | ||||
|         trackInfos); | ||||
|         chs); | ||||
| } | ||||
|  | ||||
| void writeTracksAndVerify(FluxSink& fluxSink, | ||||
| void writeTracksAndVerify(const DiskLayout& diskLayout, | ||||
|     FluxSinkFactory& fluxSinkFactory, | ||||
|     Encoder& encoder, | ||||
|     FluxSource& fluxSource, | ||||
|     Decoder& decoder, | ||||
|     const Image& image, | ||||
|     std::vector<std::shared_ptr<const TrackInfo>>& trackInfos) | ||||
|     const std::vector<CylinderHead>& chs) | ||||
| { | ||||
|     writeTracks( | ||||
|         fluxSink, | ||||
|         [&](std::shared_ptr<const TrackInfo>& trackInfo) | ||||
|         diskLayout, | ||||
|         fluxSinkFactory, | ||||
|         [&](const std::shared_ptr<const LogicalTrackLayout>& ltl) | ||||
|         { | ||||
|             auto sectors = encoder.collectSectors(trackInfo, image); | ||||
|             return encoder.encode(trackInfo, sectors, image); | ||||
|             auto sectors = encoder.collectSectors(*ltl, image); | ||||
|             return encoder.encode(*ltl, sectors, image); | ||||
|         }, | ||||
|         [&](std::shared_ptr<const TrackInfo>& trackInfo) | ||||
|         [&](const std::shared_ptr<const LogicalTrackLayout>& ltl) | ||||
|         { | ||||
|             auto trackFlux = std::make_shared<TrackFlux>(); | ||||
|             trackFlux->trackInfo = trackInfo; | ||||
|             FluxSourceIteratorHolder fluxSourceIteratorHolder(fluxSource); | ||||
|             auto result = readGroup( | ||||
|                 fluxSourceIteratorHolder, trackInfo, *trackFlux, decoder); | ||||
|             log(TrackReadLogMessage{trackFlux}); | ||||
|             std::vector<std::shared_ptr<const Track>> tracks; | ||||
|             auto [result, sectors] = readGroup( | ||||
|                 diskLayout, fluxSourceIteratorHolder, ltl, tracks, decoder); | ||||
|             log(TrackReadLogMessage{tracks, sectors}); | ||||
|  | ||||
|             if (result != GOOD_READ) | ||||
|             { | ||||
|                 adjustTrackOnError(fluxSource, trackInfo->physicalTrack); | ||||
|                 adjustTrackOnError(fluxSource, ltl->physicalCylinder); | ||||
|                 log("bad read"); | ||||
|                 return false; | ||||
|             } | ||||
|  | ||||
|             Image wanted; | ||||
|             for (const auto& sector : encoder.collectSectors(trackInfo, image)) | ||||
|             for (const auto& sector : encoder.collectSectors(*ltl, image)) | ||||
|                 wanted | ||||
|                     .put(sector->logicalTrack, | ||||
|                         sector->logicalSide, | ||||
|                     .put(sector->logicalCylinder, | ||||
|                         sector->logicalHead, | ||||
|                         sector->logicalSector) | ||||
|                     ->data = sector->data; | ||||
|  | ||||
|             for (const auto& sector : trackFlux->sectors) | ||||
|             for (const auto& sector : sectors) | ||||
|             { | ||||
|                 const auto s = wanted.get(sector->logicalTrack, | ||||
|                     sector->logicalSide, | ||||
|                 const auto s = wanted.get(sector->logicalCylinder, | ||||
|                     sector->logicalHead, | ||||
|                     sector->logicalSector); | ||||
|                 if (!s) | ||||
|                 { | ||||
| @@ -523,8 +582,8 @@ void writeTracksAndVerify(FluxSink& fluxSink, | ||||
|                     log("data mismatch on verify"); | ||||
|                     return false; | ||||
|                 } | ||||
|                 wanted.erase(sector->logicalTrack, | ||||
|                     sector->logicalSide, | ||||
|                 wanted.erase(sector->logicalCylinder, | ||||
|                     sector->logicalHead, | ||||
|                     sector->logicalSector); | ||||
|             } | ||||
|             if (!wanted.empty()) | ||||
| @@ -534,60 +593,75 @@ void writeTracksAndVerify(FluxSink& fluxSink, | ||||
|             } | ||||
|             return true; | ||||
|         }, | ||||
|         trackInfos); | ||||
|         chs); | ||||
| } | ||||
|  | ||||
| void writeDiskCommand(const Image& image, | ||||
| void writeDiskCommand(const DiskLayout& diskLayout, | ||||
|     const Image& image, | ||||
|     Encoder& encoder, | ||||
|     FluxSink& fluxSink, | ||||
|     FluxSinkFactory& fluxSinkFactory, | ||||
|     Decoder* decoder, | ||||
|     FluxSource* fluxSource, | ||||
|     const std::vector<CylinderHead>& physicalLocations) | ||||
| { | ||||
|     auto trackinfos = Layout::getLayoutOfTracksPhysical(physicalLocations); | ||||
|     auto chs = std::ranges::views::keys(diskLayout.layoutByLogicalLocation) | | ||||
|                std::ranges::to<std::vector>(); | ||||
|     if (fluxSource && decoder) | ||||
|         writeTracksAndVerify( | ||||
|             fluxSink, encoder, *fluxSource, *decoder, image, trackinfos); | ||||
|         writeTracksAndVerify(diskLayout, | ||||
|             fluxSinkFactory, | ||||
|             encoder, | ||||
|             *fluxSource, | ||||
|             *decoder, | ||||
|             image, | ||||
|             chs); | ||||
|     else | ||||
|         writeTracks(fluxSink, encoder, image, trackinfos); | ||||
|         writeTracks(diskLayout, fluxSinkFactory, encoder, image, chs); | ||||
| } | ||||
|  | ||||
| void writeDiskCommand(const Image& image, | ||||
| void writeDiskCommand(const DiskLayout& diskLayout, | ||||
|     const Image& image, | ||||
|     Encoder& encoder, | ||||
|     FluxSink& fluxSink, | ||||
|     FluxSinkFactory& fluxSinkFactory, | ||||
|     Decoder* decoder, | ||||
|     FluxSource* fluxSource) | ||||
| { | ||||
|     auto locations = Layout::computePhysicalLocations(); | ||||
|     writeDiskCommand(image, encoder, fluxSink, decoder, fluxSource, locations); | ||||
|     writeDiskCommand(diskLayout, | ||||
|         image, | ||||
|         encoder, | ||||
|         fluxSinkFactory, | ||||
|         decoder, | ||||
|         fluxSource, | ||||
|         std::ranges::views::keys(diskLayout.layoutByLogicalLocation) | | ||||
|             std::ranges::to<std::vector>()); | ||||
| } | ||||
|  | ||||
| void writeRawDiskCommand(FluxSource& fluxSource, FluxSink& fluxSink) | ||||
| void writeRawDiskCommand(const DiskLayout& diskLayout, | ||||
|     FluxSource& fluxSource, | ||||
|     FluxSinkFactory& fluxSinkFactory) | ||||
| { | ||||
|     auto physicalLocations = Layout::computePhysicalLocations(); | ||||
|     auto trackinfos = Layout::getLayoutOfTracksPhysical(physicalLocations); | ||||
|     writeTracks( | ||||
|         fluxSink, | ||||
|         [&](std::shared_ptr<const TrackInfo>& trackInfo) | ||||
|         diskLayout, | ||||
|         fluxSinkFactory, | ||||
|         [&](const std::shared_ptr<const LogicalTrackLayout>& ltl) | ||||
|         { | ||||
|             return fluxSource | ||||
|                 .readFlux(trackInfo->physicalTrack, trackInfo->physicalSide) | ||||
|                 .readFlux(ltl->physicalCylinder, ltl->physicalHead) | ||||
|                 ->next(); | ||||
|         }, | ||||
|         [](const auto&) | ||||
|         { | ||||
|             return true; | ||||
|         }, | ||||
|         trackinfos); | ||||
|         diskLayout.logicalLocations); | ||||
| } | ||||
|  | ||||
| std::shared_ptr<TrackFlux> readAndDecodeTrack(FluxSource& fluxSource, | ||||
| void readAndDecodeTrack(const DiskLayout& diskLayout, | ||||
|     FluxSource& fluxSource, | ||||
|     Decoder& decoder, | ||||
|     std::shared_ptr<const TrackInfo>& trackInfo) | ||||
|     const std::shared_ptr<const LogicalTrackLayout>& ltl, | ||||
|     std::vector<std::shared_ptr<const Track>>& tracks, | ||||
|     std::vector<std::shared_ptr<const Sector>>& combinedSectors) | ||||
| { | ||||
|     auto trackFlux = std::make_shared<TrackFlux>(); | ||||
|     trackFlux->trackInfo = trackInfo; | ||||
|  | ||||
|     if (fluxSource.isHardware()) | ||||
|         measureDiskRotation(); | ||||
|  | ||||
| @@ -595,8 +669,9 @@ std::shared_ptr<TrackFlux> readAndDecodeTrack(FluxSource& fluxSource, | ||||
|     int retriesRemaining = globalConfig()->decoder().retries(); | ||||
|     for (;;) | ||||
|     { | ||||
|         auto result = | ||||
|             readGroup(fluxSourceIteratorHolder, trackInfo, *trackFlux, decoder); | ||||
|         auto [result, sectors] = readGroup( | ||||
|             diskLayout, fluxSourceIteratorHolder, ltl, tracks, decoder); | ||||
|         combinedSectors = sectors; | ||||
|         if (result == GOOD_READ) | ||||
|             break; | ||||
|         if (result == BAD_AND_CAN_NOT_RETRY) | ||||
| @@ -613,166 +688,172 @@ std::shared_ptr<TrackFlux> readAndDecodeTrack(FluxSource& fluxSource, | ||||
|  | ||||
|         if (fluxSource.isHardware()) | ||||
|         { | ||||
|             adjustTrackOnError(fluxSource, trackInfo->physicalTrack); | ||||
|             adjustTrackOnError(fluxSource, ltl->physicalCylinder); | ||||
|             log("retrying; {} retries remaining", retriesRemaining); | ||||
|             retriesRemaining--; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     return trackFlux; | ||||
| } | ||||
|  | ||||
| std::shared_ptr<const DiskFlux> readDiskCommand( | ||||
|     FluxSource& fluxSource, Decoder& decoder) | ||||
| void readDiskCommand(const DiskLayout& diskLayout, | ||||
|     FluxSource& fluxSource, | ||||
|     Decoder& decoder, | ||||
|     Disk& disk) | ||||
| { | ||||
|     std::unique_ptr<FluxSink> outputFluxSink; | ||||
|     std::unique_ptr<FluxSinkFactory> outputFluxSinkFactory; | ||||
|     if (globalConfig()->decoder().has_copy_flux_to()) | ||||
|         outputFluxSink = | ||||
|             FluxSink::create(globalConfig()->decoder().copy_flux_to()); | ||||
|         outputFluxSinkFactory = | ||||
|             FluxSinkFactory::create(globalConfig()->decoder().copy_flux_to()); | ||||
|  | ||||
|     auto diskflux = std::make_shared<DiskFlux>(); | ||||
|     std::map<CylinderHead, std::vector<std::shared_ptr<const Track>>> | ||||
|         tracksByLogicalLocation; | ||||
|     for (auto& [ch, track] : disk.tracksByPhysicalLocation) | ||||
|         tracksByLogicalLocation[CylinderHead(track->ltl->logicalCylinder, | ||||
|                                     track->ltl->logicalHead)] | ||||
|             .push_back(track); | ||||
|  | ||||
|     log(BeginOperationLogMessage{"Reading and decoding disk"}); | ||||
|     auto physicalLocations = Layout::computePhysicalLocations(); | ||||
|     unsigned index = 0; | ||||
|     for (auto& physicalLocation : physicalLocations) | ||||
|  | ||||
|     if (fluxSource.isHardware()) | ||||
|         disk.rotationalPeriod = measureDiskRotation(); | ||||
|     else | ||||
        disk.rotationalPeriod = getRotationalPeriodFromConfig();

        std::unique_ptr<FluxSink> outputFluxSink;
        if (outputFluxSinkFactory)
            outputFluxSink = outputFluxSinkFactory->create();

        unsigned index = 0;
        for (auto& [logicalLocation, ltl] : diskLayout.layoutByLogicalLocation)
        {
            log(OperationProgressLogMessage{
                index * 100 /
                (unsigned)diskLayout.layoutByLogicalLocation.size()});
            index++;

            testForEmergencyStop();

            auto& trackFluxes = tracksByLogicalLocation[logicalLocation];
            std::vector<std::shared_ptr<const Sector>> trackSectors;
            readAndDecodeTrack(diskLayout,
                fluxSource,
                decoder,
                ltl,
                trackFluxes,
                trackSectors);

            /* Replace all tracks on the disk by the new combined set. */

            for (const auto& flux : trackFluxes)
                disk.tracksByPhysicalLocation.erase(CylinderHead{
                    flux->ptl->physicalCylinder, flux->ptl->physicalHead});
            for (const auto& flux : trackFluxes)
                disk.tracksByPhysicalLocation.emplace(
                    CylinderHead{
                        flux->ptl->physicalCylinder, flux->ptl->physicalHead},
                    flux);

            /* Likewise for sectors. */

            for (const auto& sector : trackSectors)
                disk.sectorsByPhysicalLocation.erase(
                    sector->physicalLocation.value());
            for (const auto& sector : trackSectors)
                disk.sectorsByPhysicalLocation.emplace(
                    sector->physicalLocation.value(), sector);

            if (outputFluxSink)
            {
                for (const auto& data : trackFluxes)
                    outputFluxSink->addFlux(data->ptl->physicalCylinder,
                        data->ptl->physicalHead,
                        *data->fluxmap);
            }

            if (globalConfig()->decoder().dump_records())
            {
                std::vector<std::shared_ptr<const Record>> sorted_records;

                for (const auto& data : trackFluxes)
                    sorted_records.insert(sorted_records.end(),
                        data->records.begin(),
                        data->records.end());

                std::sort(sorted_records.begin(),
                    sorted_records.end(),
                    [](const auto& o1, const auto& o2)
                    {
                        return o1->startTime < o2->startTime;
                    });

                std::cout << "\nRaw (undecoded) records follow:\n\n";
                for (const auto& record : sorted_records)
                {
                    std::cout << fmt::format("I+{:.2f}us with {:.2f}us clock\n",
                        record->startTime / 1000.0,
                        record->clock / 1000.0);
                    hexdump(std::cout, record->rawData);
                    std::cout << std::endl;
                }
            }

            if (globalConfig()->decoder().dump_sectors())
            {
                auto sectors = collectSectors(trackSectors, false);
                std::ranges::sort(sectors,
                    [](const auto& o1, const auto& o2)
                    {
                        return *o1 < *o2;
                    });

                std::cout << "\nDecoded sectors follow:\n\n";
                for (const auto& sector : sectors)
                {
                    std::cout << fmt::format(
                        "{}.{:02}.{:02}: I+{:.2f}us with {:.2f}us clock: "
                        "status {}\n",
                        sector->logicalCylinder,
                        sector->logicalHead,
                        sector->logicalSector,
                        sector->headerStartTime / 1000.0,
                        sector->clock / 1000.0,
                        Sector::statusToString(sector->status));
                    hexdump(std::cout, sector->data);
                    std::cout << std::endl;
                }
            }

            /* track can't be modified below this point. */
            log(TrackReadLogMessage{trackFluxes, trackSectors});

            std::vector<std::shared_ptr<const Sector>> all_sectors;
            for (auto& [ch, sector] : disk.sectorsByPhysicalLocation)
                all_sectors.push_back(sector);
            all_sectors = collectSectors(all_sectors);
            disk.image = std::make_shared<Image>(all_sectors);

            /* Log a _copy_ of the disk structure so that the logger
             * doesn't see the disk get mutated in subsequent reads. */
            log(DiskReadLogMessage{std::make_shared<Disk>(disk)});
        }
    }

    if (!disk.image)
        disk.image = std::make_shared<Image>();

    log(EndOperationLogMessage{"Read complete"});
}
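Because each pass erases and then re-inserts that track's entries in disk.tracksByPhysicalLocation and disk.sectorsByPhysicalLocation, the same Disk object can be handed to readDiskCommand() again and the fresh results simply replace the corresponding tracks and sectors in place. A minimal caller sketch (not part of this change; it assumes the DiskLayout, FluxSource and Decoder have already been constructed from the current configuration, and the name sketchReadToImage is invented):

/* Illustrative sketch only: drive the Disk-based read API and inspect the
 * result. */
static void sketchReadToImage(const DiskLayout& diskLayout,
    FluxSource& fluxSource,
    Decoder& decoder)
{
    Disk disk;
    readDiskCommand(diskLayout, fluxSource, decoder, disk);

    /* After the call, disk.image is never null (an empty image is
     * substituted if nothing decoded) and per-sector results live in
     * disk.sectorsByPhysicalLocation. */
    fmt::print("decoded {} sector entries\n",
        disk.sectorsByPhysicalLocation.size());

    /* A second call with the same Disk refreshes each track's entries
     * while leaving the rest of the structure intact. */
    readDiskCommand(diskLayout, fluxSource, decoder, disk);
}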

void readDiskCommand(const DiskLayout& diskLayout,
    FluxSource& fluxSource,
    Decoder& decoder,
    ImageWriter& writer)
{
    Disk disk;
    readDiskCommand(diskLayout, fluxSource, decoder, disk);

    writer.printMap(*disk.image);
    if (globalConfig()->decoder().has_write_csv_to())
        writer.writeCsv(*disk.image, globalConfig()->decoder().write_csv_to());
    writer.writeImage(*disk.image);
}

void rawReadDiskCommand(FluxSource& fluxsource, FluxSink& fluxsink)
{
    log(BeginOperationLogMessage{"Performing raw read of disk"});

    if (fluxsource.isHardware() || fluxsink.isHardware())
        measureDiskRotation();
    auto physicalLocations = Layout::computePhysicalLocations();
    unsigned index = 0;
    for (const auto& physicalLocation : physicalLocations)
    {
        log(OperationProgressLogMessage{
            index * 100 / (int)physicalLocations.size()});
        index++;

        testForEmergencyStop();
        auto trackInfo = Layout::getLayoutOfTrackPhysical(
            physicalLocation.cylinder, physicalLocation.head);
        auto fluxSourceIterator = fluxsource.readFlux(
            trackInfo->physicalTrack, trackInfo->physicalSide);

        log(BeginReadOperationLogMessage{
            trackInfo->physicalTrack, trackInfo->physicalSide});
        auto fluxmap = fluxSourceIterator->next();
        log(EndReadOperationLogMessage());
        log("{0} ms in {1} bytes",
            (int)(fluxmap->duration() / 1e6),
            fluxmap->bytes());

        fluxsink.writeFlux(
            trackInfo->physicalTrack, trackInfo->physicalSide, *fluxmap);
    }

    log(EndOperationLogMessage{"Raw read complete"});
}

@@ -3,19 +3,20 @@

#include "lib/data/locations.h"

class Disk;
class Track;
class Decoder;
class DiskLayout;
class Encoder;
class FluxSink;
class FluxSinkFactory;
class FluxSource;
class FluxSourceIteratorHolder;
class Fluxmap;
class Image;
class ImageReader;
class ImageWriter;
class LogicalTrackLayout;
class PhysicalTrackLayout;
class Sector;

struct BeginSpeedOperationLogMessage
@@ -29,12 +30,13 @@ struct EndSpeedOperationLogMessage

struct TrackReadLogMessage
{
    std::vector<std::shared_ptr<const Track>> tracks;
    std::vector<std::shared_ptr<const Sector>> sectors;
};

struct DiskReadLogMessage
{
    std::shared_ptr<const Disk> disk;
};

struct BeginReadOperationLogMessage
@@ -45,7 +47,7 @@ struct BeginReadOperationLogMessage

struct EndReadOperationLogMessage
{
    std::shared_ptr<const Track> trackDataFlux;
    std::set<std::shared_ptr<const Sector>> sectors;
};

@@ -74,43 +76,56 @@ struct OperationProgressLogMessage
    unsigned progress;
};

extern void measureDiskRotation();

extern void writeTracks(const DiskLayout& diskLayout,
    FluxSinkFactory& fluxSinkFactory,
    const std::function<std::unique_ptr<const Fluxmap>(
        const LogicalTrackLayout& ltl)> producer,
    const std::vector<CylinderHead>& locations);

extern void writeTracksAndVerify(const DiskLayout& diskLayout,
    FluxSinkFactory& fluxSinkFactory,
    Encoder& encoder,
    FluxSource& fluxSource,
    Decoder& decoder,
    const Image& image,
    const std::vector<CylinderHead>& locations);

extern void writeDiskCommand(const DiskLayout& diskLayout,
    const Image& image,
    Encoder& encoder,
    FluxSinkFactory& fluxSinkFactory,
    Decoder* decoder,
    FluxSource* fluxSource,
    const std::vector<CylinderHead>& locations);

extern void writeDiskCommand(const DiskLayout& diskLayout,
    const Image& image,
    Encoder& encoder,
    FluxSinkFactory& fluxSinkFactory,
    Decoder* decoder = nullptr,
    FluxSource* fluxSource = nullptr);

extern void writeRawDiskCommand(const DiskLayout& diskLayout,
    FluxSource& fluxSource,
    FluxSinkFactory& fluxSinkFactory);

/* Reads a single group of tracks. tracks and combinedSectors are populated.
 * tracks may contain preexisting data which will be taken into account. */

extern void readAndDecodeTrack(const DiskLayout& diskLayout,
    FluxSource& fluxSource,
    Decoder& decoder,
    const std::shared_ptr<const LogicalTrackLayout>& ltl,
    std::vector<std::shared_ptr<const Track>>& tracks,
    std::vector<std::shared_ptr<const Sector>>& combinedSectors);
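The "preexisting data" behaviour matters for retries: because the tracks vector is both input and output, a caller can keep re-reading the same logical track and each decode pass sees the union of all flux captured so far. A usage sketch follows (not part of this header; the goodEnough lambda is an invented completeness check, and clearing sectors between attempts is a defensive assumption):

/* Illustrative sketch only. */
static void sketchRetryTrack(const DiskLayout& diskLayout,
    FluxSource& fluxSource,
    Decoder& decoder,
    const std::shared_ptr<const LogicalTrackLayout>& ltl)
{
    std::vector<std::shared_ptr<const Track>> tracks;
    std::vector<std::shared_ptr<const Sector>> sectors;
    auto goodEnough = [](const auto& s)
    {
        return !s.empty();
    };

    for (int attempt = 0; attempt < 3; attempt++)
    {
        sectors.clear();
        /* tracks keeps its earlier contents, so each retry decodes the
         * union of everything read so far. */
        readAndDecodeTrack(
            diskLayout, fluxSource, decoder, ltl, tracks, sectors);
        if (goodEnough(sectors))
            break;
    }
}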

extern void readDiskCommand(const DiskLayout& diskLayout,
    FluxSource& fluxSource,
    Decoder& decoder,
    Disk& disk);
extern void readDiskCommand(const DiskLayout& diskLayout,
    FluxSource& source,
    Decoder& decoder,
    ImageWriter& writer);

#endif

@@ -5,6 +5,7 @@
#include "lib/core/utils.h"
#include <fstream>
#include <google/protobuf/text_format.h>
#include <fmt/ranges.h>

static Config config;

@@ -181,35 +182,8 @@ ConfigProto* Config::combined()
    {
        _combinedConfig = _baseConfig;

        for (const auto& optionInfo : _appliedOptions)
            _combinedConfig.MergeFrom(optionInfo.option->config());

        /* Add in the user overrides. */

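The effect of this simplification is that each applied option's config is layered onto the base with protobuf merge semantics, in the order the options were applied, before the user overrides go on top: a later MergeFrom() wins for singular fields, while repeated fields accumulate. A standalone sketch of the layering order (not the real implementation; the overrides are treated here as a plain ConfigProto purely for illustration):

/* Illustrative sketch only. */
static ConfigProto combineSketch(const ConfigProto& base,
    const std::vector<const OptionProto*>& appliedOptions,
    const ConfigProto& userOverrides)
{
    ConfigProto combined = base;

    /* Each option's embedded config, in the order it was applied. */
    for (const auto* option : appliedOptions)
        combined.MergeFrom(option->config());

    /* User overrides always win. */
    combined.MergeFrom(userOverrides);
    return combined;
}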
@@ -237,55 +211,49 @@ void Config::clear()
    _appliedOptions.clear();
}

static std::string getValidValues(const OptionGroupProto& group)
{
    return fmt::format("{}",
        fmt::join(
            std::views::transform(group.option(), &OptionProto::name), ", "));
}

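This is presumably why <fmt/ranges.h> is now included above: fmt::join over a std::views::transform view needs the range formatters. A self-contained toy showing the same pattern (the option names here are made up):

// Standalone example of the fmt::join + std::views::transform pattern.
#include <fmt/core.h>
#include <fmt/ranges.h>
#include <ranges>
#include <string>
#include <vector>

struct Opt
{
    std::string n;
    const std::string& name() const
    {
        return n;
    }
};

int main()
{
    std::vector<Opt> options{{"600"}, {"1200"}, {"1440"}};
    fmt::print("valid values are: {}\n",
        fmt::join(std::views::transform(options, &Opt::name), ", "));
    /* prints: valid values are: 600, 1200, 1440 */
}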
std::vector<std::string> Config::validate()
{
    std::vector<std::string> results;

    /* Ensure that only one item in each group is set. */

    std::map<const OptionGroupProto*, const OptionProto*> optionsByGroup;
    for (auto& [group, option, hasArgument] : _appliedOptions)
        if (group)
        {
            auto& o = optionsByGroup[group];
            if (o)
                results.push_back(
                    fmt::format("multiple mutually exclusive values set for "
                                "group '{}': valid values are: {}",
                        group->comment(),
                        getValidValues(*group)));
            o = option;
        }

    /* Ensure that every group has an option set. */

    for (const auto& group : base()->option_group())
    {
        if (!optionsByGroup.contains(&group))
        {
            results.push_back(
                fmt::format("no value set for group '{}': valid values are: {}",
                    group.comment(),
                    getValidValues(group)));
        }
    }

    /* Check option requirements. */

    for (auto [group, option, hasArgument] : _appliedOptions)
    {
        try
        {
@@ -360,11 +328,12 @@ void Config::readBaseConfig(std::string data)
        error("couldn't load external config proto");
}

Config::OptionInfo Config::findOption(
    const std::string& name, const std::string value)
{
    const OptionProto* found = nullptr;

    auto searchOptionList = [&](auto& optionList, const std::string& optionName)
    {
        for (const auto& option : optionList)
        {
@@ -377,17 +346,39 @@ const OptionProto& Config::findOption(const std::string& optionName)
        return false;
    };

    /* First look for any group names which match. */

    if (!value.empty())
        for (const auto& optionGroup : base()->option_group())
            if (optionGroup.name() == name)
            {
                /* The option must therefore be one of these. */

                if (searchOptionList(optionGroup.option(), value))
                    return {&optionGroup, found, true};

                throw OptionNotFoundException(fmt::format(
                    "value {} is not valid for option {}; valid values are: {}",
                    value,
                    name,
                    fmt::join(std::views::transform(
                                  optionGroup.option(), &OptionProto::name),
                        ", ")));
            }

    /* Now search for individual options. */

    if (searchOptionList(base()->option(), name))
        return {nullptr, found, false};

    for (const auto& optionGroup : base()->option_group())
    {
        if (optionGroup.name().empty())
            if (searchOptionList(optionGroup.option(), name))
                return {&optionGroup, found, false};
    }

    throw OptionNotFoundException(fmt::format("option {} not found", name));
}
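In practice this gives two lookup paths: a name/value pair whose name matches an option group selects one member of that group (and consumes the value), while anything else is treated as an individual or anonymous-group option. A sketch of how a frontend might exercise both paths through applyOption() (the option and group names here are invented for the example):

/* Illustrative sketch only. */
static void applyOptionsSketch(Config& config)
{
    /* "tracks" is assumed to be a group name: the value picks one of the
     * group's members, so applyOption() returns true because the value
     * was consumed. */
    bool usedValue = config.applyOption("tracks", "40");

    /* A plain option name: the value is not used and the call returns
     * false. */
    config.applyOption("highdensity", "");
    (void)usedValue;
}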
void Config::checkOptionValid(const OptionProto& option)
@@ -422,8 +413,7 @@ void Config::checkOptionValid(const OptionProto& option)
            ss << ']';

            throw InapplicableOptionException(
                fmt::format("option '{}' is inapplicable to this configuration "
                            "because {}={} could not be met",
                    option.name(),
                    req.key(),
@@ -445,22 +435,66 @@ bool Config::isOptionValid(const OptionProto& option)
    }
}

void Config::applyOption(const OptionInfo& optionInfo)
{
    auto* option = optionInfo.option;
    log(OptionLogMessage{
        option->has_message() ? option->message() : option->comment()});

    _appliedOptions.insert(optionInfo);
}

bool Config::applyOption(const std::string& name, const std::string value)
{
    auto optionInfo = findOption(name, value);
    applyOption(optionInfo);
    return optionInfo.usesValue;
}

void Config::applyOptionsFile(const std::string& data)
{
    if (!data.empty())
    {
        for (auto setting : split(data, '\n'))
        {
            setting = trimWhitespace(setting);
            if (setting.size() == 0)
                continue;
            if (setting[0] == '#')
                continue;

            auto equals = setting.find('=');
            if (equals == std::string::npos)
                error("Malformed setting line '{}'", setting);

            auto key = setting.substr(0, equals);
            auto value = setting.substr(equals + 1);
            globalConfig().set(key, value);
        }
    }
}
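The accepted format is one key=value setting per line, with blank lines and lines starting with '#' ignored; each key is handed to Config::set() as a config field path. A sketch of feeding such file contents in directly (the specific keys are only examples of the path syntax, not settings defined by this change):

/* Illustrative sketch only. */
static void optionsFileSketch()
{
    const std::string example =
        "# comment lines and blank lines are skipped\n"
        "\n"
        "decoder.dump_records=true\n"
        "decoder.write_csv_to=decoded.csv\n";

    globalConfig().applyOptionsFile(example);
}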

void Config::applyDefaultOptions()
{
    std::set<const OptionGroupProto*> appliedOptionGroups;
    for (auto& [group, option, hasArgument] : _appliedOptions)
        if (group)
            appliedOptionGroups.insert(group);

    /* For every group which doesn't have an option set, find the default and
     * set it. */

    for (const auto& group : base()->option_group())
    {
        if (!appliedOptionGroups.contains(&group))
        {
            for (const auto& option : group.option())
            {
                if (option.set_by_default())
                    applyOption({&group, &option, false});
            }
        }
    }
}
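Taken together with validate(), the intended frontend sequence appears to be: apply the user's explicit options, fill in defaults for every untouched group, then surface any remaining problems. A sketch, again with an invented group name:

/* Illustrative sketch only. */
static void configureSketch(Config& config)
{
    config.applyOption("tracks", "40"); /* explicit choices first */
    config.applyDefaultOptions();       /* then per-group defaults */

    for (const auto& problem : config.validate())
        fmt::print("config problem: {}\n", problem);
}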

void Config::clearOptions()
Some files were not shown because too many files have changed in this diff.