Compare commits

...

75 Commits
windows ... usb

Author SHA1 Message Date
David Given
e833c27452 Tidy. 2024-10-01 00:10:09 +02:00
David Given
717ab9a525 Refactor to allow the USB system to be destructed after use. 2024-10-01 00:07:53 +02:00
David Given
fb6fa969a8 Update release script. 2024-09-28 17:05:41 +02:00
David Given
a4a83c6cfc Merge pull request #770 from davidgiven/ab
Update ab.
2024-09-28 16:55:52 +02:00
David Given
2c508cf51d Give artifacts unique names. 2024-09-28 16:08:59 +02:00
David Given
e02aa00d07 WSL2 works, so use it for the release script. 2024-09-28 13:35:48 +02:00
David Given
dc384c3635 Pre-release doesn't work. 2024-09-28 12:44:12 +02:00
David Given
69db44d1cf Try WSL 2. 2024-09-28 12:42:50 +02:00
David Given
6fdc9a252a Update ab. 2024-09-28 12:35:22 +02:00
David Given
11166a3c5c Try using the prerelease wsl. 2024-09-28 12:19:00 +02:00
David Given
3b2a3c6e3a Make the _progress script a bit more robust. 2024-09-28 12:12:58 +02:00
David Given
d890383ad2 Update ab. 2024-09-28 12:07:10 +02:00
David Given
4c4b6ee045 Try setting WSL1 explicitly. 2024-09-19 16:31:31 +02:00
David Given
a55196e7e5 Update ab. 2024-09-19 16:15:27 +02:00
David Given
866d5a2933 Update ab. 2024-09-19 00:55:45 +02:00
David Given
8cba89722b Mutter mutter. 2024-09-12 00:19:11 +02:00
David Given
6d1c623716 Typo fix. 2024-09-12 00:17:26 +02:00
David Given
a8c7ffc77d Update Fedora WSL. 2024-09-12 00:15:16 +02:00
David Given
fb05b6ac6d Merge pull request #769 from davidgiven/ab
Update ab.
2024-09-11 23:17:11 +02:00
David Given
0e83b2e7df Try building with macos-13. 2024-09-11 21:37:22 +02:00
David Given
1baaa4402d Merge. 2024-09-10 21:24:50 +02:00
David Given
2af61e4aca Update ab. 2024-09-10 21:22:17 +02:00
David Given
db235dae5e Merge. 2024-09-10 21:21:54 +02:00
David Given
241878bd0e New fmt requires a slight API change. 2024-09-10 21:12:58 +02:00
David Given
1386e343ec New fmt requires a slight API change. 2024-09-10 21:12:58 +02:00
David Given
9ff51ec8ef Update CI script. 2024-09-10 19:53:44 +02:00
David Given
45036b708f Update CI script. 2024-09-10 19:53:44 +02:00
David Given
ec3b5b10df Adjust default Brother head_bias, and fix the documentation.
Fixes: #768
2024-09-10 18:32:03 +02:00
David Given
4817298dbb Update ab. 2024-09-10 17:55:45 +02:00
David Given
af0ce4cf35 Update ab. 2024-09-02 23:51:03 +02:00
David Given
3c3d8d080c Merge pull request #763 from davidgiven/protos
Encode all the protos in one go (per library), as it's vastly faster.
2024-08-12 17:43:35 +02:00
David Given
dc6af483a5 Remember to build the drivetypes table. 2024-08-12 17:32:13 +02:00
David Given
9a0b487f4b Remember to build the formats table. 2024-08-12 17:26:28 +02:00
David Given
cac4d1ce86 Encode all the protos in one go (per library), as it's vastly faster. 2024-08-12 12:36:39 +02:00
David Given
7a3a31a929 Merge pull request #759 from davidgiven/a2r
Improve the A2R writer.
2024-07-31 23:45:51 +02:00
David Given
eee6f95b15 Typo fix. 2024-07-31 13:48:06 +02:00
David Given
7a3d10451d Rework the A2R writer to be a bit less broken. 2024-07-30 22:54:59 +02:00
David Given
e4f1a5a06f Merge pull request #752 from davidgiven/tartu
Add encoder support for the Tartu format.
2024-05-14 21:48:36 +02:00
David Given
500fcde21b Merge. 2024-05-14 21:41:30 +02:00
David Given
eb363a4b2a Update Tartu documentation. 2024-05-14 21:40:50 +02:00
David Given
8a78e609b0 And fix everywhere else... 2024-05-13 23:41:37 +02:00
David Given
15c67b8cc1 Bash into workingness on OSX. 2024-05-13 23:27:53 +02:00
David Given
00e9c5a07f Add support for updating file metadata (only the SRA bits, really). 2024-05-13 21:44:58 +02:00
David Given
7643457374 Add support for renaming files. 2024-05-13 21:12:42 +02:00
David Given
78d5584e21 Add creeate, put and delete support to the CP/M filesystem driver. 2024-05-13 00:32:57 +02:00
David Given
1d1143a893 Merge from master. 2024-05-10 00:19:57 +02:00
David Given
91093e1304 Merge pull request #754 from davidgiven/greaseweazle
Correctly twiddle DTR on Linux/OSX when changing baud rates.
2024-05-01 19:11:57 +02:00
David Given
1175a06f3d Merge from master. 2024-05-01 16:23:59 +02:00
David Given
6e5abd1189 Merge from master. 2024-05-01 16:23:38 +02:00
David Given
34f97384e7 Merge pull request #753 from davidgiven/osx
Fix OSX build problems.
2024-05-01 16:23:09 +02:00
David Given
653a6a0189 Be more consistent about DTR toggling (needed to reset serial devices). 2024-05-01 12:54:22 +02:00
David Given
f0b1b61eac Merge pull request #749 from p-j-b/fix-hang-windows-adafruit-floppy-greaseweazle
Set DTR after calling SetCommState
2024-05-01 12:46:08 +02:00
David Given
c0fd121bdf Restore build script to normal. 2024-05-01 00:25:41 +02:00
David Given
b805b86ddb Fix truncate arg ordering because of stupid OSX. 2024-05-01 00:25:10 +02:00
David Given
654e7e750c Fix truncate arg ordering because of stupid OSX. 2024-05-01 00:25:10 +02:00
David Given
7501fcfe8b Looks like compiling protobuf files now requires access to the protobuf
libraries.
2024-05-01 00:18:18 +02:00
David Given
fdb7837e03 Looks like compiling protobuf files now requires access to the protobuf
libraries.
2024-05-01 00:18:18 +02:00
David Given
1c57cea483 Try and debug the OSX build failure. 2024-05-01 00:00:30 +02:00
David Given
0c8e8d4d69 Remember to mark the 40-track format as being such. 2024-04-30 23:09:45 +02:00
David Given
8876aae2cc Calculate gaps in bits, not bytes (more accurate). Pad the end of the track to
avoid weirdness reading the last sector.
2024-04-30 23:09:30 +02:00
David Given
3e053b32e2 Display a useful command to repeat a test if one fails. 2024-04-30 23:07:10 +02:00
David Given
0611728537 Don't try to change the build system just yet. 2024-04-30 21:32:58 +02:00
David Given
a84cf83ce5 Add a prototype Tartu encoder. 2024-04-30 00:56:26 +02:00
David Given
c064aa7862 Merge pull request #751 from davidgiven/tartu
Add support for the Tartu Palivere.
2024-04-23 22:21:05 +02:00
David Given
195f7126cc Update link. 2024-04-23 21:58:42 +02:00
David Given
50d466c9c1 Update Tartu documentation. 2024-04-23 21:56:42 +02:00
David Given
5763574634 Update documentation. 2024-04-21 01:18:17 +02:00
David Given
2da568b3e8 Update the Tartu documentation. 2024-04-21 00:35:39 +02:00
David Given
2732d9aec8 Get the Tartu checksums working, and hook up the CP/M filesystem code. 2024-04-21 00:17:11 +02:00
David Given
15d34aff15 Work-in-progress Tartu decoder. 2024-04-20 01:20:49 +02:00
David Given
af3e257c78 Add boilerplate for the Tartu. 2024-04-19 21:10:49 +02:00
p-j-b
c2248c7e4a Added CLRDTR and SETDTR to setBaudRate
Fixes hang in Windows with Adafruit Floppy GreaseWeazle
2024-04-02 13:05:29 +01:00
David Given
a7967b6dc3 More release script tweaks. 2024-03-31 22:50:55 +02:00
David Given
c1f47921e6 Adjust release script. 2024-03-31 22:38:21 +02:00
David Given
cda93d516b Merge pull request #748 from davidgiven/windows
Switch from MSYS builds to WSL/Fedora builds.
2024-03-31 22:31:36 +02:00
69 changed files with 2067 additions and 958 deletions

View File

@@ -10,11 +10,11 @@ jobs:
build-linux:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
repository: 'davidgiven/fluxengine'
path: 'fluxengine'
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
repository: 'davidgiven/fluxengine-testdata'
path: 'fluxengine-testdata'
@@ -25,13 +25,13 @@ jobs:
run: CXXFLAGS="-Wp,-D_GLIBCXX_ASSERTIONS" make -j`nproc` -C fluxengine
build-macos-current:
runs-on: macos-latest
runs-on: macos-13
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
repository: 'davidgiven/fluxengine'
path: 'fluxengine'
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
repository: 'davidgiven/fluxengine-testdata'
path: 'fluxengine-testdata'
@@ -41,9 +41,9 @@ jobs:
run: gmake -j`nproc` -C fluxengine
- name: Upload build artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: ${{ github.event.repository.name }}.${{ github.sha }}
name: ${{ github.event.repository.name }}.${{ github.sha }}.fluxengine.pkg
path: fluxengine/FluxEngine.pkg
build-windows:
@@ -52,10 +52,11 @@ jobs:
steps:
- name: setup WSL
run: |
curl -L https://github.com/WhitewaterFoundry/Fedora-Remix-for-WSL/releases/download/39.0.1/Fedora-Remix-for-WSL-SL_39.0.1.0_x64_arm64.msixbundle -o fedora.msixbundle
unzip fedora.msixbundle Fedora-Remix-for-WSL-SL_39.0.1.0_x64.msix
unzip Fedora-Remix-for-WSL-SL_39.0.1.0_x64.msix install.tar.gz
curl -L https://github.com/WhitewaterFoundry/Fedora-Remix-for-WSL/releases/download/40.1.0/Fedora-Remix-for-WSL-SL_40.1.0.0_x64_arm64.msixbundle -o fedora.msixbundle
unzip fedora.msixbundle Fedora-Remix-for-WSL-SL_40.1.0.0_x64.msix
unzip Fedora-Remix-for-WSL-SL_40.1.0.0_x64.msix install.tar.gz
wsl --update
wsl --set-default-version 2
wsl --import fedora fedora install.tar.gz
wsl --set-default fedora
wsl sh -c 'dnf -y install https://github.com/rpmsphere/noarch/raw/master/r/rpmsphere-release-38-1.noarch.rpm'
@@ -91,7 +92,7 @@ jobs:
wsl sh -c 'cd fluxengine && zip -9 fluxengine-windows.zip fluxengine.exe fluxengine-gui.exe upgrade-flux-file.exe brother120tool.exe brother240tool.exe FluxEngine.cydsn/CortexM3/ARM_GCC_541/Release/FluxEngine.hex fluxengine-installer.exe'
- name: Upload build artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: ${{ github.event.repository.name }}.${{ github.sha }}
name: ${{ github.event.repository.name }}.${{ github.sha }}.windows.zip
path: fluxengine/fluxengine-windows.zip

View File

@@ -20,6 +20,7 @@ jobs:
unzip fedora.msixbundle Fedora-Remix-for-WSL-SL_39.0.1.0_x64.msix
unzip Fedora-Remix-for-WSL-SL_39.0.1.0_x64.msix install.tar.gz
wsl --update
wsl --set-default-version 2
wsl --import fedora fedora install.tar.gz
wsl --set-default fedora
wsl sh -c 'dnf -y install https://github.com/rpmsphere/noarch/raw/master/r/rpmsphere-release-38-1.noarch.rpm'
@@ -33,20 +34,21 @@ jobs:
- uses: actions/checkout@v4
with:
repository: 'davidgiven/fluxengine'
path: 'fluxengine'
- name: run
run: |
wsl sh -c 'make BUILDTYPE=windows -j$(nproc)'
wsl sh -c 'cd fluxengine && make BUILDTYPE=windows -j$(nproc)'
- name: nsis
run: |
wsl sh -c 'strip fluxengine.exe -o fluxengine-stripped.exe'
wsl sh -c 'strip fluxengine-gui.exe -o fluxengine-gui-stripped.exe'
wsl sh -c 'makensis -v2 -nocd -dOUTFILE=fluxengine-installer.exe extras/windows-installer.nsi'
wsl sh -c 'cd fluxengine && strip fluxengine.exe -o fluxengine-stripped.exe'
wsl sh -c 'cd fluxengine && strip fluxengine-gui.exe -o fluxengine-gui-stripped.exe'
wsl sh -c 'cd fluxengine && makensis -v2 -nocd -dOUTFILE=fluxengine-installer.exe extras/windows-installer.nsi'
- name: zip
run: |
wsl sh -c 'zip -9 fluxengine-windows.zip fluxengine.exe fluxengine-gui.exe upgrade-flux-file.exe brother120tool.exe brother240tool.exe FluxEngine.cydsn/CortexM3/ARM_GCC_541/Release/FluxEngine.hex fluxengine-installer.exe'
wsl sh -c 'cd fluxengine && zip -9 fluxengine-windows.zip fluxengine.exe fluxengine-gui.exe upgrade-flux-file.exe brother120tool.exe brother240tool.exe FluxEngine.cydsn/CortexM3/ARM_GCC_541/Release/FluxEngine.hex fluxengine-installer.exe'
- name: date
run: |
@@ -57,6 +59,7 @@ jobs:
with:
tag-name: dev
force-branch: false
git-directory: 'fluxengine'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -75,16 +78,16 @@ jobs:
with:
name: Development build ${{ env.RELEASE_DATE }}
files: |
fluxengine.zip
fluxengine-installer.exe
fluxengine/fluxengine.zip
fluxengine/fluxengine-installer.exe
tag_name: dev
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build-macos:
runs-on: macos-latest
runs-on: macos-13
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: brew
run: brew install sqlite pkg-config libusb protobuf wxwidgets fmt make coreutils dylibbundler libjpeg

View File

@@ -72,7 +72,7 @@ all: +all README.md
binaries: all
tests: all
README.md: $(OBJ)/scripts/+mkdocindex/+mkdocindex$(EXT)
README.md: $(OBJ)/scripts/+mkdocindex/mkdocindex$(EXT)
@echo MKDOC $@
@csplit -s -f$(OBJ)/README. README.md '/<!-- FORMATSSTART -->/' '%<!-- FORMATSEND -->%'
@(cat $(OBJ)/README.00 && $< && cat $(OBJ)/README.01) > README.md

View File

@@ -103,45 +103,46 @@ particular filesystem and can read (and sometimes write, support varies) files
directly from disks, flux files or disk images. Some formats have multiple
choices because they can store multiple types of file system.
<!-- FORMATSSTART -->
<!-- This section is automatically generated. Do not edit. -->
| Profile | Format | Read? | Write? | Filesystem? |
|:--------|:-------|:-----:|:------:|:------------|
| [`acornadfs`](doc/disk-acornadfs.md) | Acorn ADFS: BBC Micro, Archimedes | 🦖 | | |
| [`acorndfs`](doc/disk-acorndfs.md) | Acorn DFS: Acorn Atom, BBC Micro series | 🦄 | | ACORNDFS |
| [`aeslanier`](doc/disk-aeslanier.md) | AES Lanier "No Problem": 616kB 5.25" 77-track SSDD hard sectored | 🦖 | | |
| [`agat`](doc/disk-agat.md) | Agat: 840kB 5.25" 80-track DS | 🦖 | 🦖 | |
| [`amiga`](doc/disk-amiga.md) | Amiga: 880kB 3.5" DSDD | 🦄 | 🦄 | AMIGAFFS |
| [`ampro`](doc/disk-ampro.md) | Ampro Little Board: CP/M | 🦖 | | CPMFS |
| [`apple2`](doc/disk-apple2.md) | Apple II: Prodos, Appledos, and CP/M | 🦄 | 🦄 | APPLEDOS CPMFS PRODOS |
| [`atarist`](doc/disk-atarist.md) | Atari ST: Almost PC compatible | 🦄 | 🦄 | |
| [`bk`](doc/disk-bk.md) | BK: 800kB 5.25"/3.5" 80-track 10-sector DSDD | 🦖 | 🦖 | |
| [`brother`](doc/disk-brother.md) | Brother word processors: GCR family | 🦄 | 🦄 | BROTHER120 FATFS |
| [`commodore`](doc/disk-commodore.md) | Commodore: 1541, 1581, 8050 and variations | 🦄 | 🦄 | CBMFS |
| [`eco1`](doc/disk-eco1.md) | VDS Eco1: CP/M; 1210kB 77-track mixed format DSHD | 🦖 | | CPMFS |
| [`epsonpf10`](doc/disk-epsonpf10.md) | Epson PF-10: CP/M; 3.5" 40-track DSDD | 🦖 | | CPMFS |
| [`f85`](doc/disk-f85.md) | Durango F85: 461kB 5.25" 77-track SS | 🦖 | | |
| [`fb100`](doc/disk-fb100.md) | Brother FB-100: 100kB 3.5" 40-track SSSD | 🦖 | | |
| [`hplif`](doc/disk-hplif.md) | Hewlett-Packard LIF: a variety of disk formats used by HP | 🦄 | 🦄 | LIF |
| [`ibm`](doc/disk-ibm.md) | IBM PC: Generic PC 3.5"/5.25" disks | 🦄 | 🦄 | FATFS |
| [`icl30`](doc/disk-icl30.md) | ICL Model 30: CP/M; 263kB 35-track DSSD | 🦖 | | CPMFS |
| [`mac`](doc/disk-mac.md) | Macintosh: 400kB/800kB 3.5" GCR | 🦄 | 🦄 | MACHFS |
| [`micropolis`](doc/disk-micropolis.md) | Micropolis: 100tpi MetaFloppy disks | 🦄 | 🦄 | |
| [`ms2000`](doc/disk-ms2000.md) | : MS2000 Microdisk Development System | | | MICRODOS |
| [`mx`](doc/disk-mx.md) | DVK MX: Soviet-era PDP-11 clone | 🦖 | | |
| [`n88basic`](doc/disk-n88basic.md) | N88-BASIC: PC8800/PC98 5.25" 77-track 26-sector DSHD | 🦄 | 🦄 | |
| [`northstar`](doc/disk-northstar.md) | Northstar: 5.25" hard sectored | 🦄 | 🦄 | |
| [`psos`](doc/disk-psos.md) | pSOS: 800kB DSDD with PHILE | 🦄 | 🦄 | PHILE |
| [`rolandd20`](doc/disk-rolandd20.md) | Roland D20: 3.5" electronic synthesiser disks | 🦄 | 🦖 | ROLAND |
| [`rx50`](doc/disk-rx50.md) | Digital RX50: 400kB 5.25" 80-track 10-sector SSDD | 🦖 | 🦖 | |
| [`smaky6`](doc/disk-smaky6.md) | Smaky 6: 308kB 5.25" 77-track 16-sector SSDD, hard sectored | 🦖 | | SMAKY6 |
| [`tids990`](doc/disk-tids990.md) | Texas Instruments DS990: 1126kB 8" DSSD | 🦖 | 🦖 | |
| [`tiki`](doc/disk-tiki.md) | Tiki 100: CP/M | | | CPMFS |
| [`victor9k`](doc/disk-victor9k.md) | Victor 9000 / Sirius One: 1224kB 5.25" DSDD GCR | 🦖 | 🦖 | |
| [`zilogmcz`](doc/disk-zilogmcz.md) | Zilog MCZ: 320kB 8" 77-track SSSD hard-sectored | 🦖 | | ZDOS |
{: .datatable }
<!-- FORMATSSTART -->
<!-- This section is automatically generated. Do not edit. -->
| Profile | Format | Read? | Write? | Filesystem? |
|:--------|:-------|:-----:|:------:|:------------|
| [`acornadfs`](doc/disk-acornadfs.md) | Acorn ADFS: BBC Micro, Archimedes | 🦖 | | |
| [`acorndfs`](doc/disk-acorndfs.md) | Acorn DFS: Acorn Atom, BBC Micro series | 🦄 | | ACORNDFS |
| [`aeslanier`](doc/disk-aeslanier.md) | AES Lanier "No Problem": 616kB 5.25" 77-track SSDD hard sectored | 🦖 | | |
| [`agat`](doc/disk-agat.md) | Agat: 840kB 5.25" 80-track DS | 🦖 | 🦖 | |
| [`amiga`](doc/disk-amiga.md) | Amiga: 880kB 3.5" DSDD | 🦄 | 🦄 | AMIGAFFS |
| [`ampro`](doc/disk-ampro.md) | Ampro Little Board: CP/M | 🦖 | | CPMFS |
| [`apple2`](doc/disk-apple2.md) | Apple II: Prodos, Appledos, and CP/M | 🦄 | 🦄 | APPLEDOS CPMFS PRODOS |
| [`atarist`](doc/disk-atarist.md) | Atari ST: Almost PC compatible | 🦄 | 🦄 | |
| [`bk`](doc/disk-bk.md) | BK: 800kB 5.25"/3.5" 80-track 10-sector DSDD | 🦖 | 🦖 | |
| [`brother`](doc/disk-brother.md) | Brother word processors: GCR family | 🦄 | 🦄 | BROTHER120 FATFS |
| [`commodore`](doc/disk-commodore.md) | Commodore: 1541, 1581, 8050 and variations | 🦄 | 🦄 | CBMFS |
| [`eco1`](doc/disk-eco1.md) | VDS Eco1: CP/M; 1210kB 77-track mixed format DSHD | 🦖 | | CPMFS |
| [`epsonpf10`](doc/disk-epsonpf10.md) | Epson PF-10: CP/M; 3.5" 40-track DSDD | 🦖 | | CPMFS |
| [`f85`](doc/disk-f85.md) | Durango F85: 461kB 5.25" 77-track SS | 🦖 | | |
| [`fb100`](doc/disk-fb100.md) | Brother FB-100: 100kB 3.5" 40-track SSSD | 🦖 | | |
| [`hplif`](doc/disk-hplif.md) | Hewlett-Packard LIF: a variety of disk formats used by HP | 🦄 | 🦄 | LIF |
| [`ibm`](doc/disk-ibm.md) | IBM PC: Generic PC 3.5"/5.25" disks | 🦄 | 🦄 | FATFS |
| [`icl30`](doc/disk-icl30.md) | ICL Model 30: CP/M; 263kB 35-track DSSD | 🦖 | | CPMFS |
| [`mac`](doc/disk-mac.md) | Macintosh: 400kB/800kB 3.5" GCR | 🦄 | 🦄 | MACHFS |
| [`micropolis`](doc/disk-micropolis.md) | Micropolis: 100tpi MetaFloppy disks | 🦄 | 🦄 | |
| [`ms2000`](doc/disk-ms2000.md) | : MS2000 Microdisk Development System | | | MICRODOS |
| [`mx`](doc/disk-mx.md) | DVK MX: Soviet-era PDP-11 clone | 🦖 | | |
| [`n88basic`](doc/disk-n88basic.md) | N88-BASIC: PC8800/PC98 5.25" 77-track 26-sector DSHD | 🦄 | 🦄 | |
| [`northstar`](doc/disk-northstar.md) | Northstar: 5.25" hard sectored | 🦄 | 🦄 | |
| [`psos`](doc/disk-psos.md) | pSOS: 800kB DSDD with PHILE | 🦄 | 🦄 | PHILE |
| [`rolandd20`](doc/disk-rolandd20.md) | Roland D20: 3.5" electronic synthesiser disks | 🦄 | 🦖 | ROLAND |
| [`rx50`](doc/disk-rx50.md) | Digital RX50: 400kB 5.25" 80-track 10-sector SSDD | 🦖 | 🦖 | |
| [`smaky6`](doc/disk-smaky6.md) | Smaky 6: 308kB 5.25" 77-track 16-sector SSDD, hard sectored | 🦖 | | SMAKY6 |
| [`tartu`](doc/disk-tartu.md) | Tartu: The Palivere and variations | 🦄 | 🦖 | CPMFS |
| [`tids990`](doc/disk-tids990.md) | Texas Instruments DS990: 1126kB 8" DSSD | 🦖 | 🦖 | |
| [`tiki`](doc/disk-tiki.md) | Tiki 100: CP/M | | | CPMFS |
| [`victor9k`](doc/disk-victor9k.md) | Victor 9000 / Sirius One: 1224kB 5.25" DSDD GCR | 🦖 | 🦖 | |
| [`zilogmcz`](doc/disk-zilogmcz.md) | Zilog MCZ: 320kB 8" 77-track SSSD hard-sectored | 🦖 | | ZDOS |
{: .datatable }
<!-- FORMATSEND -->
### Notes

View File

@@ -19,6 +19,7 @@ proto(
"./northstar/northstar.proto",
"./rolandd20/rolandd20.proto",
"./smaky6/smaky6.proto",
"./tartu/tartu.proto",
"./tids990/tids990.proto",
"./victor9k/victor9k.proto",
"./zilogmcz/zilogmcz.proto",

View File

@@ -1,6 +1,4 @@
syntax = "proto2";
import "lib/common.proto";
message Smaky6DecoderProto {}

84
arch/tartu/decoder.cc Normal file
View File

@@ -0,0 +1,84 @@
#include "lib/globals.h"
#include "lib/decoders/decoders.h"
#include "arch/tartu/tartu.h"
#include "lib/crc.h"
#include "lib/fluxmap.h"
#include "lib/decoders/fluxmapreader.h"
#include "lib/sector.h"
#include <string.h>
/* Raw 64-bit bit patterns marking the start of header and data records.
 * The 0xaaaaaaaa prefix matches the 10101... filler pattern the encoder
 * writes between records; the low word distinguishes the record type.
 * ULL suffix: the top bit is set, so the value does not fit in a signed
 * long long and the LL suffix was misleading. */
constexpr uint64_t HEADER_BITS = 0xaaaaaaaa44895554ULL;
constexpr uint64_t DATA_BITS = 0xaaaaaaaa44895545ULL;

static const FluxPattern HEADER_PATTERN(64, HEADER_BITS);
static const FluxPattern DATA_PATTERN(64, DATA_BITS);

/* Matches whichever record marker comes next on the track. */
const FluxMatchers ANY_RECORD_PATTERN {
    &HEADER_PATTERN,
    &DATA_PATTERN
};
/// Decoder for the Tartu (Palivere) format.
///
/// Each sector is two separate records: a header record carrying
/// track/side/sector plus a one-byte complement-of-sum checksum, and a
/// 128-byte data record with its own checksum of the same kind.
class TartuDecoder : public Decoder
{
public:
    TartuDecoder(const DecoderProto& config):
        Decoder(config),
        _config(config.tartu())
    {
    }

    void beginTrack() override
    {
    }

    /// Seek to the next header or data record marker; returns the flux
    /// time at the match.
    nanoseconds_t advanceToNextRecord() override
    {
        return seekToPattern(ANY_RECORD_PATTERN);
    }

    void decodeSectorRecord() override
    {
        if (readRaw64() != HEADER_BITS)
            return;

        /* Four MFM-encoded header bytes: track/side, a constant,
         * sector number, checksum. */
        auto bits = readRawBits(16 * 4);
        auto bytes = decodeFmMfm(bits).slice(0, 4);

        ByteReader br(bytes);
        uint8_t track = br.read_8();
        _sector->logicalTrack = track >> 1;
        _sector->logicalSide = track & 1;
        br.skip(1); /* seems always to be 1 */
        _sector->logicalSector = br.read_8();

        uint8_t wantChecksum = br.read_8();
        uint8_t gotChecksum = ~sumBytes(bytes.slice(0, 3));
        /* Accept the header only if its checksum is valid. DATA_MISSING
         * here means "good header, data record not seen yet"; the data
         * record decoder fills in the final status. (Previously the
         * status was also assigned unconditionally right after this
         * test, which made the checksum comparison dead code.) */
        if (wantChecksum == gotChecksum)
            _sector->status = Sector::DATA_MISSING;
    }

    void decodeDataRecord() override
    {
        if (readRaw64() != DATA_BITS)
            return;

        /* 128 data bytes plus one checksum byte, MFM encoded. */
        const auto& bits = readRawBits(129 * 16);
        const auto& bytes = decodeFmMfm(bits).slice(0, 129);

        _sector->data = bytes.slice(0, 128);
        uint8_t wantChecksum = bytes.reader().seek(128).read_8();
        uint8_t gotChecksum = ~sumBytes(_sector->data);
        _sector->status = (wantChecksum == gotChecksum) ? Sector::OK : Sector::BAD_CHECKSUM;
    }

private:
    const TartuDecoderProto& _config;
};
std::unique_ptr<Decoder> createTartuDecoder(const DecoderProto& config)
{
return std::unique_ptr<Decoder>(new TartuDecoder(config));
}

114
arch/tartu/encoder.cc Normal file
View File

@@ -0,0 +1,114 @@
#include "lib/globals.h"
#include "lib/decoders/decoders.h"
#include "lib/encoders/encoders.h"
#include "arch/tartu/tartu.h"
#include "lib/crc.h"
#include "lib/fluxmap.h"
#include "lib/sector.h"
#include <string.h>
class TartuEncoder : public Encoder
{
public:
TartuEncoder(const EncoderProto& config):
Encoder(config),
_config(config.tartu())
{
}
std::unique_ptr<Fluxmap> encode(std::shared_ptr<const TrackInfo>& trackInfo,
const std::vector<std::shared_ptr<const Sector>>& sectors,
const Image& image) override
{
_clockRateUs = _config.clock_period_us();
int bitsPerRevolution =
(_config.target_rotational_period_ms() * 1000.0) / _clockRateUs;
const auto& sector = *sectors.begin();
_bits.resize(bitsPerRevolution);
_cursor = 0;
writeFillerRawBitsUs(_config.gap1_us());
bool first = true;
for (const auto& sectorData : sectors)
{
if (!first)
writeFillerRawBitsUs(_config.gap4_us());
first = false;
writeSector(sectorData);
}
if (_cursor > _bits.size())
error("track data overrun");
writeFillerRawBitsUs(_config.target_rotational_period_ms() * 1000.0);
std::unique_ptr<Fluxmap> fluxmap(new Fluxmap);
fluxmap->appendBits(_bits,
calculatePhysicalClockPeriod(_clockRateUs * 1e3,
_config.target_rotational_period_ms() * 1e6));
return fluxmap;
}
private:
void writeBytes(const Bytes& bytes)
{
encodeMfm(_bits, _cursor, bytes, _lastBit);
}
void writeRawBits(uint64_t data, int width)
{
_cursor += width;
_lastBit = data & 1;
for (int i = 0; i < width; i++)
{
unsigned pos = _cursor - i - 1;
if (pos < _bits.size())
_bits[pos] = data & 1;
data >>= 1;
}
}
void writeFillerRawBitsUs(double us)
{
unsigned count = (us / _clockRateUs) / 2;
for (int i = 0; i < count; i++)
writeRawBits(0b10, 2);
};
void writeSector(const std::shared_ptr<const Sector>& sectorData)
{
writeRawBits(_config.header_marker(), 64);
{
Bytes bytes;
ByteWriter bw(bytes);
bw.write_8(
(sectorData->logicalTrack << 1) | sectorData->logicalSide);
bw.write_8(1);
bw.write_8(sectorData->logicalSector);
bw.write_8(~sumBytes(bytes.slice(0, 3)));
writeBytes(bytes);
}
writeFillerRawBitsUs(_config.gap3_us());
writeRawBits(_config.data_marker(), 64);
{
Bytes bytes;
ByteWriter bw(bytes);
bw.append(sectorData->data);
bw.write_8(~sumBytes(bytes.slice(0, sectorData->data.size())));
writeBytes(bytes);
}
}
private:
const TartuEncoderProto& _config;
double _clockRateUs;
std::vector<bool> _bits;
unsigned _cursor;
bool _lastBit;
};
std::unique_ptr<Encoder> createTartuEncoder(const EncoderProto& config)
{
return std::unique_ptr<Encoder>(new TartuEncoder(config));
}

8
arch/tartu/tartu.h Normal file
View File

@@ -0,0 +1,8 @@
#ifndef TARTU_H
#define TARTU_H

/* Factory functions for the Tartu (Palivere) disk format.
 *
 * NOTE(review): this header assumes std::unique_ptr and the
 * Decoder/Encoder/proto declarations are already in scope at the include
 * site -- confirm this matches the project's other arch headers. */

/* Creates a decoder which reads Tartu-format flux. */
extern std::unique_ptr<Decoder> createTartuDecoder(const DecoderProto& config);

/* Creates an encoder which writes Tartu-format flux. */
extern std::unique_ptr<Encoder> createTartuEncoder(const EncoderProto& config);

#endif

27
arch/tartu/tartu.proto Normal file
View File

@@ -0,0 +1,27 @@
// Configuration messages for the Tartu (Palivere) disk format.

syntax = "proto2";

import "lib/common.proto";

// The Tartu decoder takes no configuration.
message TartuDecoderProto {}

// Encoder layout configuration. Gap sizes are in microseconds of track
// time; markers are the raw 64-bit patterns written before each record.
message TartuEncoderProto {
    optional double clock_period_us = 1
        [ default = 2.0, (help) = "clock rate on the real device (for MFM)" ];
    optional double target_rotational_period_ms = 2
        [ default=200, (help) = "rotational period of target disk" ];
    optional double gap1_us = 3
        [ default = 1200,
            (help) = "size of gap 1 (the post-index gap)" ];
    optional double gap3_us = 4
        [ default = 150,
            (help) = "size of gap 3 (the pre-data gap)" ];
    optional double gap4_us = 5
        [ default = 180,
            (help) = "size of gap 4 (the post-data or format gap)" ];
    // Defaults match HEADER_BITS/DATA_BITS in the decoder.
    optional uint64 header_marker = 6
        [ default = 0xaaaaaaaa44895554,
            (help) = "64-bit raw bit pattern of header record marker" ];
    optional uint64 data_marker = 7
        [ default = 0xaaaaaaaa44895545,
            (help) = "64-bit raw bit pattern of data record marker" ];
}

View File

@@ -143,6 +143,8 @@ cxxlibrary(
"./arch/northstar/encoder.cc",
"./arch/rolandd20/decoder.cc",
"./arch/smaky6/decoder.cc",
"./arch/tartu/decoder.cc",
"./arch/tartu/encoder.cc",
"./arch/tids990/decoder.cc",
"./arch/tids990/encoder.cc",
"./arch/victor9k/decoder.cc",
@@ -175,6 +177,7 @@ cxxlibrary(
"arch/micropolis/micropolis.h": "./arch/micropolis/micropolis.h",
"arch/c64/data_gcr.h": "./arch/c64/data_gcr.h",
"arch/c64/c64.h": "./arch/c64/c64.h",
"arch/tartu/tartu.h": "./arch/tartu/tartu.h",
"lib/a2r.h": "./lib/a2r.h",
"lib/bitmap.h": "./lib/bitmap.h",
"lib/bytes.h": "./lib/bytes.h",
@@ -278,6 +281,8 @@ else:
("mac", "scripts/mac800_test.textpb", "--800"),
("n88basic", "", ""),
("rx50", "", ""),
("tartu", "", "--390 40track_drive"),
("tartu", "", "--780"),
("tids990", "", ""),
("victor9k", "", "--612"),
("victor9k", "", "--1224"),

5
build/_progress.py Normal file
View File

@@ -0,0 +1,5 @@
"""Emit a make-style progress marker such as "[ 42%]".

Invoked by the build system as: python _progress.py <current> <total>
"""
import sys


def format_progress(current, total):
    """Return "[NNN%]" with the percentage right-aligned to three columns.

    ``current`` and ``total`` may be numbers or numeric strings (they
    arrive as command-line arguments). A zero total is reported as 100%
    instead of raising ZeroDivisionError: no rules to run means the build
    is already done. Also avoids shadowing the ``max`` builtin, which the
    original did.
    """
    total = float(total)
    percent = 100 if total == 0 else int(100 * float(current) / total)
    return f"[{percent:>3}%]"


if __name__ == "__main__":
    (_, current, total) = sys.argv
    print(format_progress(current, total))

View File

@@ -28,9 +28,15 @@ ifeq ($(OS), Windows_NT)
endif
EXT ?=
ifeq ($(PROGRESSINFO),)
rulecount := $(shell $(MAKE) --no-print-directory -q $(OBJ)/build.mk PROGRESSINFO=1 && $(MAKE) -n $(MAKECMDGOALS) PROGRESSINFO=XXXPROGRESSINFOXXX | grep XXXPROGRESSINFOXXX | wc -l)
ruleindex := 1
PROGRESSINFO = "$(shell $(PYTHON) build/_progress.py $(ruleindex) $(rulecount))$(eval ruleindex := $(shell expr $(ruleindex) + 1))"
endif
include $(OBJ)/build.mk
MAKEFLAGS += -r
MAKEFLAGS += -r -j$(shell nproc)
.DELETE_ON_ERROR:
.PHONY: update-ab
@@ -47,9 +53,8 @@ clean::
export PYTHONHASHSEED = 1
build-files = $(shell find . -name 'build.py') $(wildcard build/*.py) $(wildcard config.py)
$(OBJ)/build.mk: Makefile $(build-files)
$(OBJ)/build.mk: Makefile $(build-files) build/ab.mk
@echo "AB"
@mkdir -p $(OBJ)
$(hide) $(PYTHON) -X pycache_prefix=$(OBJ) build/ab.py $(patsubst %,-t %,$(TARGETS)) -o $@ \
build.py || rm -f $@
$(hide) $(PYTHON) -X pycache_prefix=$(OBJ)/__pycache__ build/ab.py -o $@ build.py \
|| rm -f $@

View File

@@ -1,136 +1,82 @@
from collections.abc import Iterable, Sequence
from os.path import *
from types import SimpleNamespace
from pathlib import Path
from typing import Iterable
import argparse
import builtins
from copy import copy
import functools
import importlib
import importlib.abc
import importlib.util
from importlib.machinery import (
SourceFileLoader,
PathFinder,
ModuleSpec,
)
import inspect
import re
import sys
import builtins
import string
import fnmatch
import traceback
import sys
import hashlib
defaultGlobals = {}
targets = {}
unmaterialisedTargets = set()
materialisingStack = []
outputFp = None
verbose = False
quiet = False
cwdStack = [""]
targets = {}
unmaterialisedTargets = {} # dict, not set, to get consistent ordering
materialisingStack = []
defaultGlobals = {}
sys.path += ["."]
old_import = builtins.__import__
def new_import(name, *args, **kwargs):
if name not in sys.modules:
path = name.replace(".", "/") + ".py"
if isfile(path):
sys.stderr.write(f"loading {path}\n")
loader = importlib.machinery.SourceFileLoader(name, path)
class PathFinderImpl(PathFinder):
def find_spec(self, fullname, path, target=None):
if not path:
path = ["."]
if len(path) != 1:
return None
spec = importlib.util.spec_from_loader(
name, loader, origin="built-in"
try:
path = relpath(path[0])
except ValueError:
return None
realpath = fullname.replace(".", "/")
buildpath = realpath + ".py"
if isfile(buildpath):
spec = importlib.util.spec_from_file_location(
name=fullname,
location=buildpath,
loader=BuildFileLoaderImpl(fullname=fullname, path=buildpath),
submodule_search_locations=[],
)
module = importlib.util.module_from_spec(spec)
sys.modules[name] = module
cwdStack.append(dirname(path))
spec.loader.exec_module(module)
cwdStack.pop()
return old_import(name, *args, **kwargs)
return spec
if isdir(realpath):
return ModuleSpec(fullname, None, origin=realpath, is_package=True)
return None
builtins.__import__ = new_import
class BuildFileLoaderImpl(SourceFileLoader):
def exec_module(self, module):
sourcepath = relpath(module.__file__)
if not quiet:
print("loading", sourcepath)
cwdStack.append(dirname(sourcepath))
super(SourceFileLoader, self).exec_module(module)
cwdStack.pop()
sys.meta_path.insert(0, PathFinderImpl())
class ABException(BaseException):
pass
class Invocation:
name = None
callback = None
types = None
ins = None
outs = None
binding = None
traits = None
attr = None
attrdeps = None
def __init__(self):
self.attr = SimpleNamespace()
self.attrdeps = SimpleNamespace()
self.traits = set()
def __eq__(self, other):
return self.name is other.name
def __hash__(self):
return id(self.name)
def materialise(self, replacing=False):
if self in unmaterialisedTargets:
if not replacing and (self in materialisingStack):
print("Found dependency cycle:")
for i in materialisingStack:
print(f" {i.name}")
print(f" {self.name}")
sys.exit(1)
materialisingStack.append(self)
# Perform type conversion to the declared rule parameter types.
try:
self.args = {}
for k, v in self.binding.arguments.items():
if k != "kwargs":
t = self.types.get(k, None)
if t:
v = t(v).convert(self)
self.args[k] = v
else:
for kk, vv in v.items():
t = self.types.get(kk, None)
if t:
vv = t(vv).convert(self)
self.args[kk] = vv
# Actually call the callback.
cwdStack.append(self.cwd)
self.callback(**self.args)
cwdStack.pop()
except BaseException as e:
print(f"Error materialising {self}: {self.callback}")
print(f"Arguments: {self.args}")
raise e
if self.outs is None:
raise ABException(f"{self.name} didn't set self.outs")
if self in unmaterialisedTargets:
unmaterialisedTargets.remove(self)
materialisingStack.pop()
def bubbleattr(self, attr, xs):
xs = targetsof(xs, cwd=self.cwd)
a = set()
if hasattr(self.attrdeps, attr):
a = getattr(self.attrdeps, attr)
for x in xs:
a.add(x)
setattr(self.attrdeps, attr, a)
def __repr__(self):
return "'%s'" % self.name
def error(message):
raise ABException(message)
def Rule(func):
@@ -139,303 +85,376 @@ def Rule(func):
@functools.wraps(func)
def wrapper(*, name=None, replaces=None, **kwargs):
cwd = None
if name:
if ("+" in name) and not name.startswith("+"):
(cwd, _) = name.split("+", 1)
if "cwd" in kwargs:
cwd = kwargs["cwd"]
del kwargs["cwd"]
if not cwd:
cwd = cwdStack[-1]
if replaces:
cwd = replaces.cwd
else:
cwd = cwdStack[-1]
if name:
i = Invocation()
if name.startswith("./"):
name = join(cwd, name)
elif "+" not in name:
name = join(cwd, "+" + name)
if name[0] != "+":
name = "+" + name
t = Target(cwd, join(cwd, name))
i.name = name
i.localname = name.split("+")[-1]
if name in targets:
raise ABException(f"target {i.name} has already been defined")
targets[name] = i
assert (
t.name not in targets
), f"target {t.name} has already been defined"
targets[t.name] = t
elif replaces:
i = replaces
name = i.name
t = replaces
else:
raise ABException("you must supply either 'name' or 'replaces'")
i.cwd = cwd
i.sentinel = "$(OBJ)/.sentinels/" + name + ".mark"
i.types = func.__annotations__
i.callback = func
i.traits.add(func.__name__)
t.cwd = cwd
t.types = func.__annotations__
t.callback = func
t.traits.add(func.__name__)
if "args" in kwargs:
t.args |= kwargs["args"]
del kwargs["args"]
if "traits" in kwargs:
t.traits |= kwargs["traits"]
del kwargs["traits"]
i.binding = sig.bind(name=name, self=i, **kwargs)
i.binding.apply_defaults()
t.binding = sig.bind(name=name, self=t, **kwargs)
t.binding.apply_defaults()
unmaterialisedTargets.add(i)
unmaterialisedTargets[t] = None
if replaces:
i.materialise(replacing=True)
return i
t.materialise(replacing=True)
return t
defaultGlobals[func.__name__] = wrapper
return wrapper
class Type:
def __init__(self, value):
self.value = value
class List(Type):
def convert(self, invocation):
value = self.value
if not value:
return []
if type(value) is str:
return [value]
return list(value)
class Targets(Type):
def convert(self, invocation):
value = self.value
if not value:
return []
if type(value) is str:
value = [value]
if type(value) is list:
value = targetsof(value, cwd=invocation.cwd)
return value
class Target(Type):
def convert(self, invocation):
value = self.value
if not value:
return None
return targetof(value, cwd=invocation.cwd)
class TargetsMap(Type):
def convert(self, invocation):
value = self.value
if not value:
return {}
if type(value) is dict:
return {
k: targetof(v, cwd=invocation.cwd) for k, v in value.items()
}
raise ABException(f"wanted a dict of targets, got a {type(value)}")
def flatten(*xs):
def recurse(xs):
for x in xs:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
yield from recurse(x)
else:
yield x
return list(recurse(xs))
def fileinvocation(s):
i = Invocation()
i.name = s
i.outs = [s]
targets[s] = i
return i
def targetof(s, cwd=None):
if isinstance(s, Invocation):
s.materialise()
return s
if type(s) != str:
raise ABException("parameter of targetof is not a single target")
if s in targets:
t = targets[s]
t.materialise()
return t
if s.startswith("."):
if cwd == None:
raise ABException(
"relative target names can't be used in targetof without supplying cwd"
)
if s.startswith(".+"):
s = cwd + s[1:]
elif s.startswith("./"):
s = normpath(join(cwd, s))
elif s.endswith("/"):
return fileinvocation(s)
elif s.startswith("$"):
return fileinvocation(s)
if "+" not in s:
if isdir(s):
s = s + "+" + basename(s)
else:
return fileinvocation(s)
(path, target) = s.split("+", 2)
s = join(path, "+" + target)
loadbuildfile(join(path, "build.py"))
if not s in targets:
raise ABException(
f"build file at {path} doesn't contain +{target} when trying to resolve {s}"
)
i = targets[s]
i.materialise()
return i
def targetsof(*xs, cwd=None):
return flatten([targetof(x, cwd) for x in flatten(xs)])
def filenamesof(*xs):
s = []
for t in flatten(xs):
if type(t) == str:
t = normpath(t)
s += [t]
else:
s += [f for f in [normpath(f) for f in filenamesof(t.outs)]]
return s
def filenamesmatchingof(xs, pattern):
return fnmatch.filter(filenamesof(xs), pattern)
def targetswithtraitsof(xs, trait):
return [target for target in targetsof(xs) if trait in target.traits]
def targetnamesof(*xs):
s = []
for x in flatten(xs):
if type(x) == str:
x = normpath(x)
if x not in s:
s += [x]
else:
if x.name not in s:
s += [x.name]
return s
def filenameof(x):
xs = filenamesof(x)
if len(xs) != 1:
raise ABException("expected a single item")
return xs[0]
def bubbledattrsof(x, attr):
x = targetsof(x)
alltargets = set()
pending = set(x) if isinstance(x, Iterable) else {x}
while pending:
t = pending.pop()
if t not in alltargets:
alltargets.add(t)
if hasattr(t.attrdeps, attr):
pending.update(getattr(t.attrdeps, attr))
values = []
for t in alltargets:
if hasattr(t.attr, attr):
values += getattr(t.attr, attr)
return values
def stripext(path):
return splitext(path)[0]
def emit(*args):
outputFp.write(" ".join(flatten(args)))
outputFp.write("\n")
def templateexpand(s, invocation):
class Formatter(string.Formatter):
def get_field(self, name, a1, a2):
return (
eval(name, invocation.callback.__globals__, invocation.args),
False,
)
def format_field(self, value, format_spec):
if type(self) == str:
return value
return " ".join(
[templateexpand(f, invocation) for f in filenamesof(value)]
)
return Formatter().format(s)
def emitter_rule(rule, ins, outs, deps=[]):
emit("")
emit(".PHONY:", rule.name)
emit(rule.name, ":", rule.sentinel)
emit(
rule.sentinel,
# filenamesof(outs) if outs else [],
":",
filenamesof(ins),
filenamesof(deps),
def _isiterable(xs):
return isinstance(xs, Iterable) and not isinstance(
xs, (str, bytes, bytearray)
)
def emitter_endrule(rule, outs):
emit("\t$(hide) mkdir -p", dirname(rule.sentinel))
emit("\t$(hide) touch", rule.sentinel)
class Target:
def __init__(self, cwd, name):
if verbose:
print("rule('%s', cwd='%s'" % (name, cwd))
self.name = name
self.localname = self.name.rsplit("+")[-1]
self.traits = set()
self.dir = join("$(OBJ)", name)
self.ins = []
self.outs = []
self.materialised = False
self.args = {}
for f in filenamesof(outs):
emit(".SECONDARY:", f)
emit(f, ":", rule.sentinel, ";")
def __eq__(self, other):
return self.name is other.name
def __lt__(self, other):
return self.name < other.name
def __hash__(self):
return id(self)
def __repr__(self):
return f"Target('{self.name}')"
def templateexpand(selfi, s):
class Formatter(string.Formatter):
def get_field(self, name, a1, a2):
return (
eval(name, selfi.callback.__globals__, selfi.args),
False,
)
def format_field(self, value, format_spec):
if not value:
return ""
if type(value) == str:
return value
if _isiterable(value):
value = list(value)
if type(value) != list:
value = [value]
return " ".join(
[selfi.templateexpand(f) for f in filenamesof(value)]
)
return Formatter().format(s)
def materialise(self, replacing=False):
if self not in unmaterialisedTargets:
return
if not replacing and self in materialisingStack:
print("Found dependency cycle:")
for i in materialisingStack:
print(f" {i.name}")
print(f" {self.name}")
sys.exit(1)
materialisingStack.append(self)
# Perform type conversion to the declared rule parameter types.
try:
for k, v in self.binding.arguments.items():
if k != "kwargs":
t = self.types.get(k, None)
if t:
v = t.convert(v, self)
self.args[k] = copy(v)
else:
for kk, vv in v.items():
t = self.types.get(kk, None)
if t:
vv = t.convert(v, self)
self.args[kk] = copy(vv)
self.args["name"] = self.name
self.args["dir"] = self.dir
self.args["self"] = self
# Actually call the callback.
cwdStack.append(self.cwd)
if "kwargs" in self.binding.arguments.keys():
# If the caller wants kwargs, return all arguments except the standard ones.
cbargs = {
k: v for k, v in self.args.items() if k not in {"dir"}
}
else:
# Otherwise, just call the callback with the ones it asks for.
cbargs = {}
for k in self.binding.arguments.keys():
if k != "kwargs":
try:
cbargs[k] = self.args[k]
except KeyError:
error(
f"invocation of {self} failed because {k} isn't an argument"
)
self.callback(**cbargs)
cwdStack.pop()
except BaseException as e:
print(f"Error materialising {self}: {self.callback}")
print(f"Arguments: {self.args}")
raise e
if self.outs is None:
raise ABException(f"{self.name} didn't set self.outs")
if self in unmaterialisedTargets:
del unmaterialisedTargets[self]
materialisingStack.pop()
self.materialised = True
def convert(value, target):
if not value:
return None
return target.targetof(value)
def targetof(self, value):
if isinstance(value, str) and (value[0] == "="):
value = join(self.dir, value[1:])
return targetof(value, self.cwd)
def emitter_label(s):
emit("\t$(hide)", "$(ECHO)", s)
def _filetarget(value, cwd):
if value in targets:
return targets[value]
t = Target(cwd, value)
t.outs = [value]
targets[value] = t
return t
def emitter_exec(cs):
for c in cs:
emit("\t$(hide)", c)
def targetof(value, cwd=None):
if not cwd:
cwd = cwdStack[-1]
if isinstance(value, Path):
value = value.as_posix()
if isinstance(value, Target):
t = value
else:
assert (
value[0] != "="
), "can only use = for targets associated with another target"
if value.startswith("."):
# Check for local rule.
if value.startswith(".+"):
value = normpath(join(cwd, value[1:]))
# Check for local path.
elif value.startswith("./"):
value = normpath(join(cwd, value))
# Explicit directories are always raw files.
elif value.endswith("/"):
return _filetarget(value, cwd)
# Anything starting with a variable expansion is always a raw file.
elif value.startswith("$"):
return _filetarget(value, cwd)
# If this is not a rule lookup...
if "+" not in value:
# ...and if the value is pointing at a directory without a trailing /,
# it's a shorthand rule lookup.
if isdir(value):
value = value + "+" + basename(value)
# Otherwise it's an absolute file.
else:
return _filetarget(value, cwd)
# At this point we have the fully qualified name of a rule.
(path, target) = value.rsplit("+", 1)
value = join(path, "+" + target)
if value not in targets:
# Load the new build file.
path = join(path, "build.py")
try:
loadbuildfile(path)
except ModuleNotFoundError:
error(
f"no such build file '{path}' while trying to resolve '{value}'"
)
assert (
value in targets
), f"build file at '{path}' doesn't contain '+{target}' when trying to resolve '{value}'"
t = targets[value]
t.materialise()
return t
def unmake(*ss):
return [
re.sub(r"\$\(([^)]*)\)", r"$\1", s) for s in flatten(filenamesof(ss))
]
class Targets:
def convert(value, target):
if not value:
return []
assert _isiterable(value), "cannot convert non-list to Targets"
return [target.targetof(x) for x in flatten(value)]
class TargetsMap:
def convert(value, target):
if not value:
return {}
output = {k: target.targetof(v) for k, v in value.items()}
for k, v in output.items():
assert (
len(filenamesof([v])) == 1
), f"targets of a TargetsMap used as an argument of {target} with key '{k}' must contain precisely one output file, but was {filenamesof([v])}"
return output
def loadbuildfile(filename):
filename = filename.replace("/", ".").removesuffix(".py")
builtins.__import__(filename)
def flatten(items):
def generate(xs):
for x in xs:
if _isiterable(x):
yield from generate(x)
else:
yield x
return list(generate(items))
def targetnamesof(items):
assert _isiterable(items), "argument of filenamesof is not a collection"
return [t.name for t in items]
def filenamesof(items):
assert _isiterable(items), "argument of filenamesof is not a collection"
def generate(xs):
for x in xs:
if isinstance(x, Target):
yield from generate(x.outs)
else:
yield x
return list(generate(items))
def filenameof(x):
xs = filenamesof(x.outs)
assert (
len(xs) == 1
), f"tried to use filenameof() on {x} which does not have exactly one output: {x.outs}"
return xs[0]
def emit(*args, into=None):
s = " ".join(args) + "\n"
if into is not None:
into += [s]
else:
outputFp.write(s)
def emit_rule(name, ins, outs, cmds=[], label=None):
fins = filenamesof(ins)
fouts = filenamesof(outs)
nonobjs = [f for f in fouts if not f.startswith("$(OBJ)")]
emit("")
lines = []
if nonobjs:
emit("clean::", into=lines)
emit("\t$(hide) rm -f", *nonobjs, into=lines)
emit(".PHONY:", name, into=lines)
if outs:
emit(name, ":", *fouts, into=lines)
emit(*fouts, "&:" if len(fouts) > 1 else ":", *fins, "\x01", into=lines)
if label:
emit("\t$(hide)", "$(ECHO) $(PROGRESSINFO) ", label, into=lines)
for c in cmds:
emit("\t$(hide)", c, into=lines)
else:
assert len(cmds) == 0, "rules with no outputs cannot have commands"
emit(name, ":", *fins, into=lines)
cmd = "".join(lines)
hash = hashlib.sha1(bytes(cmd, "utf-8")).hexdigest()
outputFp.write(cmd.replace("\x01", f"$(OBJ)/.hashes/{hash}"))
if outs:
emit(f"$(OBJ)/.hashes/{hash}:")
emit(
f"\t$(hide) mkdir -p $(OBJ)/.hashes && touch $(OBJ)/.hashes/{hash}"
)
emit("")
@Rule
def simplerule(
self,
name,
ins: Targets = None,
outs: List = [],
deps: Targets = None,
commands: List = [],
ins: Targets = [],
outs: Targets = [],
deps: Targets = [],
commands=[],
label="RULE",
**kwargs,
):
self.ins = ins
self.outs = outs
self.deps = deps
emitter_rule(self, ins + deps, outs)
emitter_label(templateexpand("{label} {name}", self))
dirs = []
cs = []
@@ -447,100 +466,69 @@ def simplerule(
cs = [("mkdir -p %s" % dir) for dir in dirs]
for c in commands:
cs += [templateexpand(c, self)]
cs += [self.templateexpand(c)]
emitter_exec(cs)
emitter_endrule(self, outs)
@Rule
def normalrule(
self,
name=None,
ins: Targets = None,
deps: Targets = None,
outs: List = [],
label="RULE",
objdir=None,
commands: List = [],
**kwargs,
):
objdir = objdir or join("$(OBJ)", name)
self.attr.objdir = objdir
simplerule(
replaces=self,
ins=ins,
deps=deps,
outs=[join(objdir, f) for f in outs],
label=label,
commands=commands,
**kwargs,
emit_rule(
name=self.name,
ins=ins + deps,
outs=outs,
label=self.templateexpand("{label} {name}"),
cmds=cs,
)
@Rule
def export(self, name=None, items: TargetsMap = {}, deps: Targets = None):
cs = []
self.ins = []
self.outs = []
def export(self, name=None, items: TargetsMap = {}, deps: Targets = []):
ins = []
outs = []
for dest, src in items.items():
destf = filenameof(dest)
dir = dirname(destf)
dest = self.targetof(dest)
outs += [dest]
srcs = filenamesof(src)
if len(srcs) != 1:
raise ABException(
"a dependency of an export must have exactly one output file"
)
destf = filenameof(dest)
srcs = filenamesof([src])
assert (
len(srcs) == 1
), "a dependency of an exported file must have exactly one output file"
subrule = simplerule(
name=self.name + "/+" + destf,
name=f"{self.localname}/{destf}",
cwd=self.cwd,
ins=[srcs[0]],
outs=[destf],
commands=["cp %s %s" % (srcs[0], destf)],
label="CP",
)
subrule.materialise()
emit("clean::")
emit("\t$(hide) rm -f", destf)
self.ins += [subrule]
emitter_rule(
self,
self.ins,
self.outs,
[(d.outs if d.outs else d.sentinel) for d in deps],
simplerule(
replaces=self,
ins=outs + deps,
outs=["=sentinel"],
commands=["touch {outs[0]}"],
label="EXPORT",
)
emitter_endrule(self, self.outs)
def loadbuildfile(filename):
filename = filename.replace("/", ".").removesuffix(".py")
builtins.__import__(filename)
def load(filename):
loadbuildfile(filename)
callerglobals = inspect.stack()[1][0].f_globals
for k, v in defaultGlobals.items():
callerglobals[k] = v
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--verbose", action="store_true")
parser.add_argument("-q", "--quiet", action="store_true")
parser.add_argument("-o", "--output")
parser.add_argument("files", nargs="+")
parser.add_argument("-t", "--targets", action="append")
args = parser.parse_args()
if not args.targets:
raise ABException("no targets supplied")
global verbose
verbose = args.verbose
global quiet
quiet = args.quiet
global outputFp
outputFp = open(args.output, "wt")
for k in ("Rule", "Targets", "load", "filenamesof", "stripext"):
for k in ["Rule"]:
defaultGlobals[k] = globals()[k]
global __name__
@@ -550,12 +538,9 @@ def main():
for f in args.files:
loadbuildfile(f)
for t in flatten([a.split(",") for a in args.targets]):
(path, target) = t.split("+", 2)
s = join(path, "+" + target)
if s not in targets:
raise ABException("target %s is not defined" % s)
targets[s].materialise()
while unmaterialisedTargets:
t = next(iter(unmaterialisedTargets))
t.materialise()
emit("AB_LOADED = 1\n")

View File

@@ -1,21 +1,19 @@
from os.path import basename, join
from build.ab import (
ABException,
List,
Rule,
Targets,
TargetsMap,
filenameof,
filenamesmatchingof,
filenamesof,
flatten,
normalrule,
bubbledattrsof,
simplerule,
)
from build.utils import (
filenamesmatchingof,
stripext,
targetswithtraitsof,
collectattrs,
)
from os.path import *
from types import SimpleNamespace
class Toolchain:
@@ -39,16 +37,18 @@ class HostToolchain:
def cfileimpl(self, name, srcs, deps, suffix, commands, label, kind, cflags):
outleaf = stripext(basename(filenameof(srcs[0]))) + suffix
outleaf = "=" + stripext(basename(filenameof(srcs[0]))) + suffix
normalrule(
cflags = collectattrs(targets=deps, name="caller_cflags", initial=cflags)
t = simplerule(
replaces=self,
ins=srcs,
deps=deps,
outs=[outleaf],
label=label,
commands=commands,
cflags=cflags + bubbledattrsof(deps, "caller_cflags"),
args={"cflags": cflags},
)
@@ -58,7 +58,7 @@ def cfile(
name,
srcs: Targets = None,
deps: Targets = None,
cflags: List = [],
cflags=[],
suffix=".o",
toolchain=Toolchain,
commands=None,
@@ -77,7 +77,7 @@ def cxxfile(
name,
srcs: Targets = None,
deps: Targets = None,
cflags: List = [],
cflags=[],
suffix=".o",
toolchain=Toolchain,
commands=None,
@@ -92,7 +92,11 @@ def cxxfile(
)
def findsources(name, srcs, deps, cflags, toolchain, filerule):
def findsources(name, srcs, deps, cflags, toolchain, filerule, cwd):
headers = filenamesmatchingof(srcs, "*.h")
cflags = cflags + ["-I" + dirname(h) for h in headers]
deps = deps + headers
objs = []
for s in flatten(srcs):
objs += [
@@ -102,15 +106,16 @@ def findsources(name, srcs, deps, cflags, toolchain, filerule):
deps=deps,
cflags=cflags,
toolchain=toolchain,
cwd=cwd,
)
for f in filenamesof(s)
for f in filenamesof([s])
if f.endswith(".c")
or f.endswith(".cc")
or f.endswith(".cpp")
or f.endswith(".S")
or f.endswith(".s")
]
if any(f.endswith(".o") for f in filenamesof(s)):
if any(f.endswith(".o") for f in filenamesof([s])):
objs += [s]
return objs
@@ -121,7 +126,7 @@ def cheaders(
self,
name,
hdrs: TargetsMap = None,
caller_cflags: List = None,
caller_cflags=[],
deps: Targets = None,
):
cs = []
@@ -129,27 +134,24 @@ def cheaders(
outs = []
i = 0
for dest, src in hdrs.items():
s = filenamesof(src)
if len(s) != 1:
raise ABException(
"the target of a header must return exactly one file"
)
s = filenamesof([src])
assert (
len(s) == 1
), "the target of a header must return exactly one file"
cs += ["cp {ins[" + str(i) + "]} {outs[" + str(i) + "]}"]
outs += [dest]
outs += ["=" + dest]
i = i + 1
r = normalrule(
r = simplerule(
replaces=self,
ins=ins,
outs=outs,
commands=cs,
deps=deps,
label="CHEADERS",
args={"caller_cflags": caller_cflags + ["-I" + self.dir]},
)
r.materialise()
self.attr.caller_cflags = caller_cflags + ["-I" + r.attr.objdir]
self.bubbleattr("caller_cflags", deps)
def libraryimpl(
@@ -187,28 +189,33 @@ def libraryimpl(
deps = deps + [hr]
objs = findsources(
name,
self.localname,
srcs,
targetswithtraitsof(deps, "cheaders"),
cflags + bubbledattrsof(deps, "caller_cflags"),
cflags,
toolchain,
kind,
self.cwd,
)
normalrule(
simplerule(
replaces=self,
ins=objs,
outs=[basename(name) + ".a"],
outs=[f"={self.localname}.a"],
label=label,
commands=commands,
args={
"caller_cflags": collectattrs(
targets=deps + ([hr] if hr else []), name="caller_cflags"
),
"caller_ldflags": collectattrs(
targets=deps, name="caller_ldflags", initial=caller_ldflags
),
},
traits={"cheaders"},
)
self.outs = self.outs + (hr.outs if hr else [])
self.traits.add("cheaders")
self.attr.caller_ldflags = caller_ldflags
self.bubbleattr("caller_ldflags", deps)
self.bubbleattr("caller_cflags", deps)
@Rule
def clibrary(
@@ -217,10 +224,10 @@ def clibrary(
srcs: Targets = None,
deps: Targets = None,
hdrs: TargetsMap = None,
caller_cflags: List = [],
caller_ldflags: List = [],
cflags: List = [],
ldflags: List = [],
caller_cflags=[],
caller_ldflags=[],
cflags=[],
ldflags=[],
toolchain=Toolchain,
commands=None,
label=None,
@@ -254,10 +261,10 @@ def cxxlibrary(
srcs: Targets = None,
deps: Targets = None,
hdrs: TargetsMap = None,
caller_cflags: List = [],
caller_ldflags: List = [],
cflags: List = [],
ldflags: List = [],
caller_cflags=[],
caller_ldflags=[],
cflags=[],
ldflags=[],
toolchain=Toolchain,
commands=None,
label=None,
@@ -297,18 +304,22 @@ def programimpl(
kind,
):
ars = filenamesmatchingof(deps, "*.a")
deps = deps + filenamesmatchingof(srcs, "*.h")
ldflags = ldflags + bubbledattrsof(deps, "caller_ldflags")
cfiles = findsources(name, srcs, deps, cflags, toolchain, filerule)
normalrule(
cfiles = findsources(
self.localname, srcs, deps, cflags, toolchain, filerule, self.cwd
)
simplerule(
replaces=self,
ins=cfiles + ars + ars,
outs=[basename(name) + "$(EXT)"],
outs=[f"={self.localname}$(EXT)"],
deps=deps,
label=toolchain.label + label,
commands=commands,
ldflags=ldflags,
args={
"ldflags": collectattrs(
targets=deps, name="caller_ldflags", initial=ldflags
)
},
)
@@ -318,8 +329,8 @@ def cprogram(
name,
srcs: Targets = None,
deps: Targets = None,
cflags: List = [],
ldflags: List = [],
cflags=[],
ldflags=[],
toolchain=Toolchain,
commands=None,
label="CLINK",
@@ -349,8 +360,8 @@ def cxxprogram(
name,
srcs: Targets = None,
deps: Targets = None,
cflags: List = [],
ldflags: List = [],
cflags=[],
ldflags=[],
toolchain=Toolchain,
commands=None,
label="CXXLINK",

View File

@@ -1,4 +1,4 @@
from build.ab import Rule, emit, Target, bubbledattrsof, filenamesof
from build.ab import Rule, emit, Target, filenamesof
from types import SimpleNamespace
import os
import subprocess
@@ -14,68 +14,46 @@ HOST_PACKAGES := $(shell $(HOST_PKG_CONFIG) --list-all | cut -d' ' -f1 | sort)
)
@Rule
def package(self, name, package=None, fallback: Target = None):
emit("ifeq ($(filter %s, $(PACKAGES)),)" % package)
def _package(self, name, package, fallback, prefix=""):
emit(f"ifeq ($(filter {package}, $({prefix}PACKAGES)),)")
if fallback:
emit(f"PACKAGE_DEPS_{package} := ", filenamesof(fallback))
emit(f"{prefix}PACKAGE_DEPS_{package} := ", *filenamesof([fallback]))
emit(
f"PACKAGE_CFLAGS_{package} :=",
bubbledattrsof(fallback, "caller_cflags"),
f"{prefix}PACKAGE_CFLAGS_{package} :=",
*fallback.args.get("caller_cflags", []),
)
emit(
f"PACKAGE_LDFLAGS_{package} := ",
bubbledattrsof(fallback, "caller_ldflags"),
f"$(filter %.a, $(PACKAGE_DEPS_{package}))",
f"{prefix}PACKAGE_LDFLAGS_{package} := ",
*fallback.args.get("caller_ldflags", []),
f"$(filter %.a, $({prefix}PACKAGE_DEPS_{package}))",
)
else:
emit(f"$(error Required package '{package}' not installed.)")
emit("else")
emit(
f"PACKAGE_CFLAGS_{package} := $(shell $(PKG_CONFIG) --cflags {package})"
f"{prefix}PACKAGE_CFLAGS_{package} := $(shell $({prefix}PKG_CONFIG) --cflags {package})"
)
emit(
f"PACKAGE_LDFLAGS_{package} := $(shell $(PKG_CONFIG) --libs {package})"
f"{prefix}PACKAGE_LDFLAGS_{package} := $(shell $({prefix}PKG_CONFIG) --libs {package})"
)
emit(f"PACKAGE_DEPS_{package} :=")
emit(f"{prefix}PACKAGE_DEPS_{package} :=")
emit("endif")
emit(f"{self.name}:")
self.attr.caller_cflags = [f"$(PACKAGE_CFLAGS_{package})"]
self.attr.caller_ldflags = [f"$(PACKAGE_LDFLAGS_{package})"]
self.args["caller_cflags"] = [f"$({prefix}PACKAGE_CFLAGS_{package})"]
self.args["caller_ldflags"] = [f"$({prefix}PACKAGE_LDFLAGS_{package})"]
self.traits.add("clibrary")
self.traits.add("cheaders")
self.ins = []
self.outs = [f"$(PACKAGE_DEPS_{package})"]
self.outs = [f"$({prefix}PACKAGE_DEPS_{package})"]
@Rule
def package(self, name, package=None, fallback: Target = None):
_package(self, name, package, fallback)
@Rule
def hostpackage(self, name, package=None, fallback: Target = None):
emit("ifeq ($(filter %s, $(HOST_PACKAGES)),)" % package)
if fallback:
emit(
f"HOST_PACKAGE_CFLAGS_{package} :=",
bubbledattrsof(fallback, "caller_cflags"),
)
emit(
f"HOST_PACKAGE_LDFLAGS_{package} := ",
bubbledattrsof(fallback, "caller_ldflags"),
)
emit(f"HOST_PACKAGE_DEP_{package} := ", fallback.name)
else:
emit(f"$(error Required host package '{package}' not installed.)")
emit("else")
emit(
f"HOST_PACKAGE_CFLAGS_{package} := $(shell $(HOST_PKG_CONFIG) --cflags {package})"
)
emit(
f"HOST_PACKAGE_LDFLAGS_{package} := $(shell $(HOST_PKG_CONFIG) --libs {package})"
)
emit(f"HOST_PACKAGE_DEP_{package} := ")
emit("endif")
self.attr.caller_cflags = [f"$(HOST_PACKAGE_CFLAGS_{package})"]
self.attr.caller_ldflags = [f"$(HOST_PACKAGE_LDFLAGS_{package})"]
self.ins = []
self.outs = [f"$(HOST_PACKAGE_DEP_{package})"]
_package(self, name, package, fallback, "HOST_")

View File

@@ -1,17 +1,8 @@
from os.path import join
from build.ab import (
Rule,
Targets,
emit,
normalrule,
filenamesof,
filenamesmatchingof,
bubbledattrsof,
targetswithtraitsof,
)
from build.c import cxxlibrary
from build.ab import Rule, Targets, emit, simplerule, filenamesof
from build.utils import filenamesmatchingof, collectattrs
from types import SimpleNamespace
import build.pkg
from os.path import join
import build.pkg # to get the protobuf package check
emit(
"""
@@ -24,49 +15,85 @@ endif
@Rule
def proto(self, name, srcs: Targets = None, deps: Targets = None):
normalrule(
def proto(self, name, srcs: Targets = [], deps: Targets = []):
simplerule(
replaces=self,
ins=srcs,
outs=[f"{name}.descriptor"],
outs=[f"={name}.descriptor"],
deps=deps,
commands=[
"$(PROTOC) --include_source_info --descriptor_set_out={outs[0]} {ins}"
],
label="PROTO",
args={"protosrcs": filenamesof(srcs)},
)
self.attr.protosrcs = filenamesof(srcs)
self.bubbleattr("protosrcs", deps)
@Rule
def protocc(self, name, srcs: Targets = None, deps: Targets = None):
def protocc(self, name, srcs: Targets = [], deps: Targets = []):
outs = []
protos = []
for f in filenamesmatchingof(bubbledattrsof(srcs, "protosrcs"), "*.proto"):
allsrcs = collectattrs(targets=srcs, name="protosrcs")
assert allsrcs, "no sources provided"
for f in filenamesmatchingof(allsrcs, "*.proto"):
cc = f.replace(".proto", ".pb.cc")
h = f.replace(".proto", ".pb.h")
protos += [f]
srcs += [f]
outs += [cc, h]
outs += ["=" + cc, "=" + h]
srcname = f"{name}_srcs"
objdir = join("$(OBJ)", srcname)
r = normalrule(
name=srcname,
r = simplerule(
name=f"{self.localname}_srcs",
cwd=self.cwd,
ins=protos,
outs=outs,
deps=deps,
commands=["$(PROTOC) --cpp_out={self.attr.objdir} {ins}"],
commands=["$(PROTOC) --cpp_out={dir} {ins}"],
label="PROTOCC",
)
headers = {f: join(objdir, f) for f in outs if f.endswith(".pb.h")}
headers = {f[1:]: join(r.dir, f[1:]) for f in outs if f.endswith(".pb.h")}
from build.c import cxxlibrary
cxxlibrary(
replaces=self,
srcs=[r],
deps=targetswithtraitsof(deps, "cheaders"),
deps=deps,
hdrs=headers,
)
@Rule
def protojava(self, name, srcs: Targets = [], deps: Targets = []):
outs = []
allsrcs = collectattrs(targets=srcs, name="protosrcs")
assert allsrcs, "no sources provided"
protos = []
for f in filenamesmatchingof(allsrcs, "*.proto"):
protos += [f]
srcs += [f]
r = simplerule(
name=f"{self.localname}_srcs",
cwd=self.cwd,
ins=protos,
outs=[f"={self.localname}.srcjar"],
deps=deps,
commands=[
"mkdir -p {dir}/srcs",
"$(PROTOC) --java_out={dir}/srcs {ins}",
"$(JAR) cf {outs[0]} -C {dir}/srcs .",
],
traits={"srcjar"},
label="PROTOJAVA",
)
from build.java import javalibrary
javalibrary(
replaces=self,
deps=[r] + deps,
)

View File

@@ -1,13 +1,63 @@
from build.ab import Rule, normalrule, Target, filenameof, Targets
from os.path import basename
from build.ab import (
Rule,
Target,
Targets,
filenameof,
filenamesof,
cwdStack,
error,
simplerule,
)
from os.path import relpath, splitext, join, basename, isfile
from glob import iglob
import fnmatch
import itertools
def filenamesmatchingof(xs, pattern):
return fnmatch.filter(filenamesof(xs), pattern)
def stripext(path):
return splitext(path)[0]
def targetswithtraitsof(xs, trait):
return [t for t in xs if trait in t.traits]
def collectattrs(*, targets, name, initial=[]):
s = set(initial)
for a in [t.args.get(name, []) for t in targets]:
s.update(a)
return sorted(list(s))
def itemsof(pattern, root=None, cwd=None):
if not cwd:
cwd = cwdStack[-1]
if not root:
root = "."
pattern = join(cwd, pattern)
root = join(cwd, root)
result = {}
for f in iglob(pattern, recursive=True):
try:
if isfile(f):
result[relpath(f, root)] = f
except ValueError:
error(f"file '{f}' is not in root '{root}'")
return result
@Rule
def objectify(self, name, src: Target, symbol):
normalrule(
simplerule(
replaces=self,
ins=["build/_objectify.py", src],
outs=[basename(filenameof(src)) + ".h"],
outs=[f"={basename(filenameof(src))}.h"],
commands=["$(PYTHON) {ins[0]} {ins[1]} " + symbol + " > {outs}"],
label="OBJECTIFY",
)
@@ -24,19 +74,19 @@ def test(
label="TEST",
):
if command:
normalrule(
simplerule(
replaces=self,
ins=[command],
outs=["sentinel"],
outs=["=sentinel"],
commands=["{ins[0]}", "touch {outs}"],
deps=deps,
label=label,
)
else:
normalrule(
simplerule(
replaces=self,
ins=ins,
outs=["sentinel"],
outs=["=sentinel"],
commands=commands + ["touch {outs}"],
deps=deps,
label=label,

37
build/zip.py Normal file
View File

@@ -0,0 +1,37 @@
from build.ab import (
Rule,
simplerule,
TargetsMap,
filenameof,
emit,
)
emit(
"""
ZIP ?= zip
ZIPNOTE ?= zipnote
"""
)
@Rule
def zip(
self, name, flags="", items: TargetsMap = {}, extension="zip", label="ZIP"
):
cs = ["rm -f {outs[0]}"]
ins = []
for k, v in items.items():
cs += [
"cat %s | $(ZIP) -q %s {outs[0]} -" % (filenameof(v), flags),
"printf '@ -\\n@=%s\\n' | $(ZIPNOTE) -w {outs[0]}" % k,
]
ins += [v]
simplerule(
replaces=self,
ins=ins,
outs=[f"={self.localname}." + extension],
commands=cs,
label=label,
)

View File

@@ -7,21 +7,21 @@ Brother word processor disks are weird, using custom tooling and chipsets.
They are completely not PC compatible in every possible way other than the
size.
Different word processors use different disk formats --- the only ones
supported by FluxEngine are the 120kB and 240kB 3.5" formats. The default
options are for the 240kB format. For the 120kB format, which is 40 track, do
`fluxengine read brother -s :t=1-79x2`.
Different word processors use different disk formats --- the only ones supported
by FluxEngine are the 120kB and 240kB 3.5" formats. Use the `--120` and `--240`
options to select which one.
Apparently about 20% of Brother word processors have alignment issues which
means that the disks can't be read by FluxEngine (because the tracks on the
disk don't line up with the position of the head in a PC drive). The word
processors themselves solved this by microstepping until they found where the
real track is, but normal PC drives aren't capable of doing this. Particularly
with the 120kB disks, you might want to fiddle with the start track (e.g.
`:t=0-79x2`) to get a clean read. Keep an eye on the bad sector map that's
dumped at the end of a read. My word processor likes to put logical track 0 on
physical track 3, which means that logical track 77 is on physical track 80;
luckily my PC drive can access track 80.
means that the disks can't be read by FluxEngine (because the tracks on the disk
don't line up with the position of the head in a PC drive). The word processors
themselves solved this by microstepping until they found where the real track
is, but normal PC drives aren't capable of doing this. Particularly with the
120kB disks, you might want to fiddle with the head bias (e.g.
`--drive.head_bias=3`) to get a clean read. Keep an eye on the bad sector map
that's dumped at the end of a read. My word processor likes to put logical track
0 on physical track 3, which means that logical track 77 is on physical track
80, so I need that `head_bias` value of 3; luckily my PC drive can access track
80.
Using FluxEngine to *write* disks isn't a problem, so the
simplest solution is to use FluxEngine to create a new disk, with the tracks

48
doc/disk-tartu.md Normal file
View File

@@ -0,0 +1,48 @@
tartu
====
## The Palivere and variations
<!-- This file is automatically generated. Do not edit. -->
The Tartu Palivere is a 1988 Z80-based computer from Estonia. It is a CP/M
machine with 64kB of RAM, running off a 2MHz ꃣ0e30
clone; it operated off punched tape, cassette, external hard drive or floppy, and was notable as being the first ever computer with an Estonian keyboard.
<div style="text-align: center">
<img src="tartu.jpg" alt="The Tartu computer's developer Leo Humal working with one."/>
</div>
From a floppy disk perspective, it is interesting because the floppy drive
interface is almost entirely handled in software --- necessary at the time as
the usual floppy disk interface chip at the time, the ⎲fba5
of the WD1793), was hard to find. Instead, the floppy controller board was
implemented entirely using TTL logic. Despite this, the encoding is fairly high
density, using MFM and with up to 780kB on a double-sided 80 track disk.
<div style="text-align: center">
<img src="tartu-fdc.jpg" alt="The Tartu FDC with Soviet TTL logic chips."/>
</div>
FluxEngine supports reading and writing Tartu disks with CP/M filesystem access.
## Options
- Format variants:
- `390`: 390kB 5.25" 40-track DSDD
- `780`: 780kB 5.25" 80-track DSDD
## Examples
To read:
- `fluxengine read tartu --390 -s drive:0 -o tartu.img`
- `fluxengine read tartu --780 -s drive:0 -o tartu.img`
To write:
- `fluxengine write tartu --390 -d drive:0 -i tartu.img`
- `fluxengine write tartu --780 -d drive:0 -i tartu.img`
## References
- [The Estonia Museum of Electronics](https://www.elektroonikamuuseum.ee/tartu_arvuti_lugu.html)

BIN
doc/tartu-fdc.jpg Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 165 KiB

BIN
doc/tartu.jpg Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View File

@@ -1,4 +1,4 @@
from build.ab import normalrule, simplerule
from build.ab import simplerule, simplerule
from build.utils import objectify
from build.c import clibrary
@@ -14,10 +14,10 @@ clibrary(
},
)
normalrule(
simplerule(
name="fluxengine_iconset",
ins=["./icon.png"],
outs=["fluxengine.iconset"],
outs=["=fluxengine.iconset"],
commands=[
"mkdir -p {outs[0]}",
"sips -z 64 64 {ins[0]} --out {outs[0]}/icon_32x32@2x.png > /dev/null",
@@ -25,18 +25,18 @@ normalrule(
label="ICONSET",
)
normalrule(
simplerule(
name="fluxengine_icns",
ins=[".+fluxengine_iconset"],
outs=["fluxengine.icns"],
outs=["=fluxengine.icns"],
commands=["iconutil -c icns -o {outs[0]} {ins[0]}"],
label="ICONUTIL",
)
normalrule(
simplerule(
name="fluxengine_ico",
ins=["./icon.png"],
outs=["fluxengine.ico"],
outs=["=fluxengine.ico"],
commands=["png2ico {outs[0]} {ins[0]}"],
label="MAKEICON",
)

View File

@@ -24,5 +24,5 @@ proto(
protocc(
name="config_proto_lib",
srcs=[".+common_proto", ".+config_proto", "arch+arch_proto"],
srcs=[".+common_proto", ".+config_proto", "arch+arch_proto", "+fl2_proto"]
)

View File

@@ -18,6 +18,7 @@
#include "arch/northstar/northstar.h"
#include "arch/rolandd20/rolandd20.h"
#include "arch/smaky6/smaky6.h"
#include "arch/tartu/tartu.h"
#include "arch/tids990/tids990.h"
#include "arch/victor9k/victor9k.h"
#include "arch/zilogmcz/zilogmcz.h"
@@ -51,6 +52,7 @@ std::unique_ptr<Decoder> Decoder::create(const DecoderProto& config)
{DecoderProto::kNorthstar, createNorthstarDecoder },
{DecoderProto::kRolandd20, createRolandD20Decoder },
{DecoderProto::kSmaky6, createSmaky6Decoder },
{DecoderProto::kTartu, createTartuDecoder },
{DecoderProto::kTids990, createTids990Decoder },
{DecoderProto::kVictor9K, createVictor9kDecoder },
{DecoderProto::kZilogmcz, createZilogMczDecoder },
@@ -89,7 +91,7 @@ std::shared_ptr<TrackDataFlux> Decoder::decodeToSectors(
Fluxmap::Position recordStart = fmr.tell();
_sector->clock = advanceToNextRecord();
if (fmr.eof() || !_sector->clock)
return _trackdata;
break;
/* Read the sector record. */
@@ -108,28 +110,26 @@ std::shared_ptr<TrackDataFlux> Decoder::decodeToSectors(
{
/* The data is in a separate record. */
for (;;)
_sector->headerStartTime = before.ns();
_sector->headerEndTime = after.ns();
_sector->clock = advanceToNextRecord();
if (fmr.eof() || !_sector->clock)
break;
before = fmr.tell();
decodeDataRecord();
after = fmr.tell();
if (_sector->status != Sector::DATA_MISSING)
{
_sector->position = before.bytes;
_sector->dataStartTime = before.ns();
_sector->dataEndTime = after.ns();
pushRecord(before, after);
}
else
{
_sector->headerStartTime = before.ns();
_sector->headerEndTime = after.ns();
_sector->clock = advanceToNextRecord();
if (fmr.eof() || !_sector->clock)
break;
before = fmr.tell();
decodeDataRecord();
after = fmr.tell();
if (_sector->status != Sector::DATA_MISSING)
{
_sector->position = before.bytes;
_sector->dataStartTime = before.ns();
_sector->dataEndTime = after.ns();
pushRecord(before, after);
break;
}
fmr.skipToEvent(F_BIT_PULSE);
resetFluxDecoder();
}
@@ -142,6 +142,8 @@ std::shared_ptr<TrackDataFlux> Decoder::decodeToSectors(
_trackdata->sectors.push_back(_sector);
}
}
return _trackdata;
}
void Decoder::pushRecord(

View File

@@ -15,13 +15,14 @@ import "arch/mx/mx.proto";
import "arch/northstar/northstar.proto";
import "arch/rolandd20/rolandd20.proto";
import "arch/smaky6/smaky6.proto";
import "arch/tartu/tartu.proto";
import "arch/tids990/tids990.proto";
import "arch/victor9k/victor9k.proto";
import "arch/zilogmcz/zilogmcz.proto";
import "lib/fluxsink/fluxsink.proto";
import "lib/common.proto";
//NEXT: 32
//NEXT: 33
message DecoderProto {
optional double pulse_debounce_threshold = 1 [default = 0.30,
(help) = "ignore pulses with intervals shorter than this, in fractions of a clock"];
@@ -50,6 +51,7 @@ message DecoderProto {
NorthstarDecoderProto northstar = 24;
RolandD20DecoderProto rolandd20 = 31;
Smaky6DecoderProto smaky6 = 30;
TartuDecoderProto tartu = 32;
Tids990DecoderProto tids990 = 16;
Victor9kDecoderProto victor9k = 17;
ZilogMczDecoderProto zilogmcz = 18;

View File

@@ -11,6 +11,7 @@
#include "arch/macintosh/macintosh.h"
#include "arch/micropolis/micropolis.h"
#include "arch/northstar/northstar.h"
#include "arch/tartu/tartu.h"
#include "arch/tids990/tids990.h"
#include "arch/victor9k/victor9k.h"
#include "lib/encoders/encoders.pb.h"
@@ -24,8 +25,8 @@ std::unique_ptr<Encoder> Encoder::create(const EncoderProto& config)
static const std::map<int,
std::function<std::unique_ptr<Encoder>(const EncoderProto&)>>
encoders = {
{EncoderProto::kAmiga, createAmigaEncoder },
{EncoderProto::kAgat, createAgatEncoder },
{EncoderProto::kAmiga, createAmigaEncoder },
{EncoderProto::kApple2, createApple2Encoder },
{EncoderProto::kBrother, createBrotherEncoder },
{EncoderProto::kC64, createCommodore64Encoder},
@@ -33,6 +34,7 @@ std::unique_ptr<Encoder> Encoder::create(const EncoderProto& config)
{EncoderProto::kMacintosh, createMacintoshEncoder },
{EncoderProto::kMicropolis, createMicropolisEncoder },
{EncoderProto::kNorthstar, createNorthstarEncoder },
{EncoderProto::kTartu, createTartuEncoder },
{EncoderProto::kTids990, createTids990Encoder },
{EncoderProto::kVictor9K, createVictor9kEncoder },
};

View File

@@ -9,6 +9,7 @@ import "arch/ibm/ibm.proto";
import "arch/macintosh/macintosh.proto";
import "arch/micropolis/micropolis.proto";
import "arch/northstar/northstar.proto";
import "arch/tartu/tartu.proto";
import "arch/tids990/tids990.proto";
import "arch/victor9k/victor9k.proto";
@@ -27,5 +28,6 @@ message EncoderProto
Victor9kEncoderProto victor9k = 11;
Apple2EncoderProto apple2 = 12;
AgatEncoderProto agat = 13;
TartuEncoderProto tartu = 14;
}
}

View File

@@ -9,6 +9,7 @@
#include "lib/logger.h"
#include "lib/proto.h"
#include "lib/fluxmap.h"
#include "lib/layout.h"
#include "lib/a2r.h"
#include <fstream>
#include <sys/stat.h>
@@ -22,11 +23,6 @@ namespace
return ticks * NS_PER_TICK / A2R_NS_PER_TICK;
}
bool singlesided(void)
{
return globalConfig()->heads().start() == globalConfig()->heads().end();
}
class A2RFluxSink : public FluxSink
{
public:
@@ -35,11 +31,7 @@ namespace
_bytes{},
_writer{_bytes.writer()}
{
log("A2R: writing A2R {} file containing {} tracks\n",
singlesided() ? "single sided" : "double sided",
globalConfig()->tracks().end() -
globalConfig()->tracks().start() + 1);
log("A2R: collecting data");
time_t now{std::time(nullptr)};
auto t = gmtime(&now);
@@ -48,12 +40,19 @@ namespace
~A2RFluxSink()
{
auto locations = Layout::computeLocations();
Layout::getBounds(
locations, _minTrack, _maxTrack, _minSide, _maxSide);
log("A2R: writing A2R {} file containing {} tracks...",
(_minSide == _maxSide) ? "single sided" : "double sided",
_maxTrack - _minTrack + 1);
writeHeader();
writeInfo();
writeStream();
writeMeta();
log("A2R: writing output file...\n");
std::ofstream of(
_config.filename(), std::ios::out | std::ios::binary);
if (!of.is_open())
@@ -82,10 +81,10 @@ namespace
Bytes info;
auto writer = info.writer();
writer.write_8(A2R_INFO_CHUNK_VERSION);
auto version_str_padded = fmt::format("{: <32}", "Fluxengine");
auto version_str_padded = fmt::format("{: <32}", "FluxEngine");
assert(version_str_padded.size() == 32);
writer.append(version_str_padded);
writer.write_8(singlesided() ? A2R_DISK_525 : A2R_DISK_35);
writer.write_8(A2R_DISK_35);
writer.write_8(1); // write protected
writer.write_8(1); // synchronized
writeChunkAndData(A2R_CHUNK_INFO, info);
@@ -192,15 +191,15 @@ namespace
}
else
{
// We have an index, so this is real from a floppy and should be
// "one revolution plus a bit"
// We have an index, so this is a real read from a floppy and
// should be "one revolution plus a bit"
fmr.skipToEvent(F_BIT_INDEX);
write_flux();
}
uint32_t chunk_size = 10 + trackBytes.size();
_strmWriter.write_8(cylinder);
_strmWriter.write_8((cylinder << 1) | head);
_strmWriter.write_8(A2R_TIMING);
_strmWriter.write_le32(trackBytes.size());
_strmWriter.write_le32(ticks_to_a2r(loopPoint));
@@ -219,6 +218,10 @@ namespace
Bytes _strmBytes;
ByteWriter _strmWriter{_strmBytes.writer()};
std::map<std::string, std::string> _metadata;
int _minSide;
int _maxSide;
int _minTrack;
int _maxTrack;
};
} // namespace

View File

@@ -93,7 +93,7 @@ struct Sector : public LogicalLocation
template <>
struct fmt::formatter<Sector::Status> : formatter<string_view>
{
auto format(Sector::Status status, format_context& ctx)
auto format(Sector::Status status, format_context& ctx) const
{
return formatter<string_view>::format(
Sector::statusToString(status), ctx);

View File

@@ -42,11 +42,9 @@ public:
commtimeouts.ReadIntervalTimeout = 100;
SetCommTimeouts(_handle, &commtimeouts);
if (!EscapeCommFunction(_handle, CLRDTR))
error("Couldn't clear DTR: {}", get_last_error_string());
Sleep(200);
if (!EscapeCommFunction(_handle, SETDTR))
error("Couldn't set DTR: {}", get_last_error_string());
/* Toggle DTR to reset the device. */
toggleDtr();
PurgeComm(_handle,
PURGE_RXABORT | PURGE_RXCLEAR | PURGE_TXABORT | PURGE_TXCLEAR);
@@ -58,6 +56,15 @@ public:
}
public:
void toggleDtr() override
{
if (!EscapeCommFunction(_handle, CLRDTR))
error("Couldn't clear DTR: {}", get_last_error_string());
Sleep(200);
if (!EscapeCommFunction(_handle, SETDTR))
error("Couldn't set DTR: {}", get_last_error_string());
}
ssize_t readImpl(uint8_t* buffer, size_t len) override
{
DWORD rlen;
@@ -97,6 +104,8 @@ public:
.Parity = NOPARITY,
.StopBits = ONESTOPBIT};
SetCommState(_handle, &dcb);
toggleDtr();
}
private:
@@ -157,12 +166,7 @@ public:
/* Toggle DTR to reset the device. */
int flag = TIOCM_DTR;
if (ioctl(_fd, TIOCMBIC, &flag) == -1)
error("cannot clear DTR on serial port: {}", strerror(errno));
usleep(200000);
if (ioctl(_fd, TIOCMBIS, &flag) == -1)
error("cannot set DTR on serial port: {}", strerror(errno));
toggleDtr();
/* Flush pending input from a generic greaseweazel device */
tcsetattr(_fd, TCSAFLUSH, &t);
@@ -174,6 +178,16 @@ public:
}
public:
void toggleDtr() override
{
int flag = TIOCM_DTR;
if (ioctl(_fd, TIOCMBIC, &flag) == -1)
error("cannot clear DTR on serial port: {}", strerror(errno));
usleep(200000);
if (ioctl(_fd, TIOCMBIS, &flag) == -1)
error("cannot set DTR on serial port: {}", strerror(errno));
}
ssize_t readImpl(uint8_t* buffer, size_t len) override
{
ssize_t rlen = ::read(_fd, buffer, len);
@@ -198,6 +212,8 @@ public:
tcgetattr(_fd, &t);
cfsetspeed(&t, baudRate);
tcsetattr(_fd, TCSANOW, &t);
toggleDtr();
}
private:

View File

@@ -11,6 +11,7 @@ public:
virtual ssize_t readImpl(uint8_t* buffer, size_t len) = 0;
virtual ssize_t write(const uint8_t* buffer, size_t len) = 0;
virtual void setBaudRate(int baudRate) = 0;
virtual void toggleDtr() = 0;
void read(uint8_t* buffer, size_t len);
void read(Bytes& bytes);

View File

@@ -11,9 +11,12 @@
#include "lib/logger.h"
#include "greaseweazle.h"
static USB* usb = NULL;
static USB* usb = nullptr;
USB::~USB() {}
USB::~USB()
{
usb = nullptr;
}
static std::shared_ptr<CandidateDevice> selectDevice()
{
@@ -59,8 +62,12 @@ static std::shared_ptr<CandidateDevice> selectDevice()
exit(1);
}
USB* get_usb_impl()
std::unique_ptr<USB> USB::create()
{
std::unique_ptr<USB> r;
if (usb)
error("more than one USB object created");
/* Special case for certain configurations. */
if (globalConfig()->usb().has_greaseweazle() &&
@@ -68,33 +75,40 @@ USB* get_usb_impl()
{
const auto& conf = globalConfig()->usb().greaseweazle();
log("Using Greaseweazle on serial port {}", conf.port());
return createGreaseweazleUsb(conf.port(), conf);
r.reset(createGreaseweazleUsb(conf.port(), conf));
}
/* Otherwise, select a device by USB ID. */
auto candidate = selectDevice();
switch (candidate->id)
else
{
case FLUXENGINE_ID:
log("Using FluxEngine {}", candidate->serial);
return createFluxengineUsb(candidate->device);
/* Otherwise, select a device by USB ID. */
case GREASEWEAZLE_ID:
log("Using Greaseweazle {} on {}",
candidate->serial,
candidate->serialPort);
return createGreaseweazleUsb(
candidate->serialPort, globalConfig()->usb().greaseweazle());
auto candidate = selectDevice();
switch (candidate->id)
{
case FLUXENGINE_ID:
log("Using FluxEngine {}", candidate->serial);
r.reset(createFluxengineUsb(candidate->device));
break;
default:
error("internal");
case GREASEWEAZLE_ID:
log("Using Greaseweazle {} on {}",
candidate->serial,
candidate->serialPort);
r.reset(createGreaseweazleUsb(candidate->serialPort,
globalConfig()->usb().greaseweazle()));
break;
default:
error("internal");
}
}
usb = r.get();
return r;
}
USB& getUsb()
{
if (!usb)
usb = get_usb_impl();
error("USB instance not created");
return *usb;
}

View File

@@ -13,6 +13,9 @@ namespace libusbp
class USB
{
public:
static std::unique_ptr<USB> create();
public:
virtual ~USB();

View File

@@ -1,19 +1,24 @@
#include "lib/globals.h"
#include "lib/vfs/vfs.h"
#include "lib/config.pb.h"
#include <fmt/format.h>
#include <regex>
class CpmFsFilesystem : public Filesystem
class CpmFsFilesystem : public Filesystem, public HasBitmap, public HasMount
{
class Entry
{
public:
Entry(const Bytes& bytes, int map_entry_size)
Entry(const Bytes& bytes, int map_entry_size, unsigned index):
index(index)
{
if (bytes[0] == 0xe5)
deleted = true;
user = bytes[0] & 0x0f;
{
std::stringstream ss;
ss << (char)(user + '0') << ':';
for (int i = 1; i <= 8; i++)
{
@@ -64,13 +69,117 @@ class CpmFsFilesystem : public Filesystem
}
}
Bytes toBytes(int map_entry_size) const
{
Bytes bytes(32);
ByteWriter bw(bytes);
if (deleted)
{
for (int i = 0; i < 32; i++)
bw.write_8(0xe5);
}
else
{
bw.write_8(user);
/* Encode the filename. */
for (int i = 1; i < 12; i++)
bytes[i] = 0x20;
for (char c : filename)
{
if (islower(c))
throw BadPathException();
if (c == '.')
{
if (bw.pos >= 9)
throw BadPathException();
bw.seek(9);
continue;
}
if ((bw.pos == 9) || (bw.pos == 12))
throw BadPathException();
bw.write_8(c);
}
/* Set the mode. */
if (mode.find('R') != std::string::npos)
bytes[9] |= 0x80;
if (mode.find('S') != std::string::npos)
bytes[10] |= 0x80;
if (mode.find('A') != std::string::npos)
bytes[11] |= 0x80;
/* EX, S1, S2, RC */
bw.seek(12);
bw.write_8(extent & 0x1f); /* EX */
bw.write_8(0); /* S1 */
bw.write_8(extent >> 5); /* S2 */
bw.write_8(records); /* RC */
/* Allocation map. */
switch (map_entry_size)
{
case 1:
for (int i = 0; i < 16; i++)
bw.write_8(allocation_map[i]);
break;
case 2:
for (int i = 0; i < 8; i++)
bw.write_le16(allocation_map[i]);
break;
}
}
return bytes;
}
void changeFilename(const std::string& filename)
{
static std::regex FORMATTER("(?:(1?[0-9]):)?([^ .]+)\\.?([^ .]*)");
std::smatch results;
bool matched = std::regex_match(filename, results, FORMATTER);
if (!matched)
throw BadPathException();
std::string user = results[1];
std::string stem = results[2];
std::string ext = results[3];
if (stem.size() > 8)
throw BadPathException();
if (ext.size() > 3)
throw BadPathException();
this->user = std::stoi(user);
if (this->user > 15)
throw BadPathException();
if (ext.empty())
this->filename = stem;
else
this->filename = fmt::format("{}.{}", stem, ext);
}
std::string combinedFilename() const
{
return fmt::format("{}:{}", user, filename);
}
public:
unsigned index;
std::string filename;
std::string mode;
unsigned user;
unsigned extent;
unsigned records;
std::vector<unsigned> allocation_map;
bool deleted = false;
};
public:
@@ -83,7 +192,8 @@ public:
uint32_t capabilities() const override
{
return OP_GETFSDATA | OP_LIST | OP_GETFILE | OP_GETDIRENT;
return OP_GETFSDATA | OP_LIST | OP_GETFILE | OP_PUTFILE | OP_DELETE |
OP_GETDIRENT | OP_CREATE | OP_MOVE | OP_PUTATTRS;
}
std::map<std::string, std::string> getMetadata() override
@@ -94,7 +204,7 @@ public:
for (int d = 0; d < _config.dir_entries(); d++)
{
auto entry = getEntry(d);
if (!entry)
if (entry->deleted)
continue;
for (unsigned block : entry->allocation_map)
@@ -112,6 +222,17 @@ public:
return attributes;
}
void create(bool, const std::string&) override
{
auto& start = _config.filesystem_start();
_filesystemStart =
getOffsetOfSector(start.track(), start.side(), start.sector());
_sectorSize = getLogicalSectorSize(start.track(), start.side());
_directory = Bytes{0xe5} * (_config.dir_entries() * 32);
putCpmBlock(0, _directory);
}
FilesystemStatus check() override
{
return FS_OK;
@@ -127,15 +248,15 @@ public:
for (int d = 0; d < _config.dir_entries(); d++)
{
auto entry = getEntry(d);
if (!entry)
if (entry->deleted)
continue;
auto& dirent = map[entry->filename];
auto& dirent = map[entry->combinedFilename()];
if (!dirent)
{
dirent = std::make_unique<Dirent>();
dirent->path = {entry->filename};
dirent->filename = entry->filename;
dirent->filename = entry->combinedFilename();
dirent->path = {dirent->filename};
dirent->mode = entry->mode;
dirent->length = 0;
dirent->file_type = TYPE_FILE;
@@ -173,6 +294,42 @@ public:
throw FileNotFoundException();
}
void putMetadata(const Path& path,
const std::map<std::string, std::string>& metadata) override
{
mount();
if (path.size() != 1)
throw BadPathException();
/* Only updating MODE is supported. */
if (metadata.empty())
return;
if ((metadata.size() != 1) || (metadata.begin()->first != MODE))
throw UnimplementedFilesystemException();
auto mode = metadata.begin()->second;
/* Update all dirents corresponding to this file. */
bool found = false;
for (int d = 0; d < _config.dir_entries(); d++)
{
std::unique_ptr<Entry> entry = getEntry(d);
if (entry->deleted)
continue;
if (path[0] == entry->combinedFilename())
{
entry->mode = mode;
putEntry(entry);
found = true;
}
}
if (!found)
throw FileNotFoundException();
unmount();
}
Bytes getFile(const Path& path) override
{
mount();
@@ -190,9 +347,9 @@ public:
for (int d = 0; d < _config.dir_entries(); d++)
{
entry = getEntry(d);
if (!entry)
if (entry->deleted)
continue;
if (path[0] != entry->filename)
if (path[0] != entry->combinedFilename())
continue;
if (entry->extent < logicalExtent)
continue;
@@ -201,7 +358,7 @@ public:
break;
}
if (!entry)
if (entry->deleted)
{
if (logicalExtent == 0)
throw FileNotFoundException();
@@ -236,8 +393,160 @@ public:
return data;
}
private:
void mount()
public:
void putFile(const Path& path, const Bytes& bytes) override
{
mount();
if (path.size() != 1)
throw BadPathException();
/* Test to see if the file already exists. */
for (int d = 0; d < _config.dir_entries(); d++)
{
std::unique_ptr<Entry> entry = getEntry(d);
if (entry->deleted)
continue;
if (path[0] == entry->combinedFilename())
throw CannotWriteException();
}
/* Write blocks, one at a time. */
std::unique_ptr<Entry> entry;
ByteReader br(bytes);
while (!br.eof())
{
unsigned extent = br.pos / 0x4000;
Bytes block = br.read(_config.block_size());
/* Allocate a block and write it. */
auto bit = std::find(_bitmap.begin(), _bitmap.end(), false);
if (bit == _bitmap.end())
throw DiskFullException();
*bit = true;
unsigned blocknum = bit - _bitmap.begin();
putCpmBlock(blocknum, block);
/* Do we need a new directory entry? */
if (!entry ||
entry->allocation_map[std::size(entry->allocation_map) - 1])
{
if (entry)
{
entry->records = 0x80;
putEntry(entry);
}
entry.reset();
for (int d = 0; d < _config.dir_entries(); d++)
{
entry = getEntry(d);
if (entry->deleted)
break;
entry.reset();
}
if (!entry)
throw DiskFullException();
entry->deleted = false;
entry->changeFilename(path[0]);
entry->extent = extent;
entry->mode = "";
std::fill(entry->allocation_map.begin(),
entry->allocation_map.end(),
0);
}
/* Hook up the block in the allocation map. */
auto mit = std::find(
entry->allocation_map.begin(), entry->allocation_map.end(), 0);
*mit = blocknum;
}
if (entry)
{
entry->records = ((bytes.size() & 0x3fff) + 127) / 128;
putEntry(entry);
}
unmount();
}
void moveFile(const Path& oldPath, const Path& newPath) override
{
mount();
if ((oldPath.size() != 1) || (newPath.size() != 1))
throw BadPathException();
/* Check to make sure that the file exists, and that the new filename
* does not. */
bool found = false;
for (int d = 0; d < _config.dir_entries(); d++)
{
auto entry = getEntry(d);
if (entry->deleted)
continue;
auto filename = entry->combinedFilename();
if (filename == oldPath[0])
found = true;
if (filename == newPath[0])
throw CannotWriteException();
}
if (!found)
throw FileNotFoundException();
/* Now do the rename. */
for (int d = 0; d < _config.dir_entries(); d++)
{
auto entry = getEntry(d);
if (entry->deleted)
continue;
auto filename = entry->combinedFilename();
if (filename == oldPath[0])
{
entry->changeFilename(newPath[0]);
putEntry(entry);
}
}
unmount();
}
void deleteFile(const Path& path) override
{
mount();
if (path.size() != 1)
throw BadPathException();
/* Remove all dirents for this file. */
bool found = false;
for (int d = 0; d < _config.dir_entries(); d++)
{
auto entry = getEntry(d);
if (entry->deleted)
continue;
if (path[0] != entry->combinedFilename())
continue;
entry->deleted = true;
putEntry(entry);
found = true;
}
if (!found)
throw FileNotFoundException();
unmount();
}
public:
void mount() override
{
auto& start = _config.filesystem_start();
_filesystemStart =
@@ -268,26 +577,71 @@ private:
_blocksPerLogicalExtent = 16384 / _config.block_size();
_directory = getCpmBlock(0, _dirBlocks);
/* Create the allocation bitmap. */
_bitmap.clear();
_bitmap.resize(_filesystemBlocks);
for (int d = 0; d < _dirBlocks; d++)
_bitmap[d] = true;
for (int d = 0; d < _config.dir_entries(); d++)
{
std::unique_ptr<Entry> entry = getEntry(d);
if (entry->deleted)
continue;
for (unsigned block : entry->allocation_map)
{
if (block >= _filesystemBlocks)
throw BadFilesystemException();
if (block)
_bitmap[block] = true;
}
}
}
void unmount()
{
putCpmBlock(0, _directory);
}
private:
std::unique_ptr<Entry> getEntry(unsigned d)
{
auto bytes = _directory.slice(d * 32, 32);
if (bytes[0] == 0xe5)
return nullptr;
return std::make_unique<Entry>(bytes, _allocationMapSize);
return std::make_unique<Entry>(bytes, _allocationMapSize, d);
}
Bytes getCpmBlock(uint32_t number, uint32_t count = 1)
void putEntry(std::unique_ptr<Entry>& entry)
{
unsigned sector = number * _blockSectors;
ByteWriter bw(_directory);
bw.seek(entry->index * 32);
bw.append(entry->toBytes(_allocationMapSize));
}
unsigned computeSector(uint32_t block) const
{
unsigned sector = block * _blockSectors;
if (_config.has_padding())
sector += (sector / _config.padding().every()) *
_config.padding().amount();
return sector;
}
Bytes getCpmBlock(uint32_t block, uint32_t count = 1)
{
return getLogicalSector(
sector + _filesystemStart, _blockSectors * count);
computeSector(block) + _filesystemStart, _blockSectors * count);
}
void putCpmBlock(uint32_t block, const Bytes& bytes)
{
putLogicalSector(computeSector(block) + _filesystemStart, bytes);
}
public:
std::vector<bool> getBitmapForDebugging() override
{
return _bitmap;
}
private:
@@ -303,6 +657,7 @@ private:
uint32_t _blocksPerLogicalExtent;
int _allocationMapSize;
Bytes _directory;
std::vector<bool> _bitmap;
};
std::unique_ptr<Filesystem> Filesystem::createCpmFsFilesystem(

View File

@@ -277,4 +277,18 @@ public:
static std::unique_ptr<Filesystem> createFilesystemFromConfig();
};
/* Used for tests only. */
class HasBitmap
{
public:
virtual std::vector<bool> getBitmapForDebugging() = 0;
};
class HasMount
{
public:
virtual void mount() = 0;
};
#endif

View File

@@ -1,11 +1,11 @@
from build.ab import Rule, normalrule, Targets
from build.ab import Rule, simplerule, Targets, TargetsMap
from build.c import cxxprogram, HostToolchain
encoders = {}
@Rule
def protoencode(self, name, srcs: Targets, proto, symbol):
def protoencode_single(self, name, srcs: Targets, proto, symbol):
if proto not in encoders:
r = cxxprogram(
name="protoencode_" + proto,
@@ -24,16 +24,37 @@ def protoencode(self, name, srcs: Targets, proto, symbol):
r = encoders[proto]
r.materialise()
normalrule(
simplerule(
replaces=self,
ins=srcs,
outs=[f"{name}.cc"],
outs=[f"={name}.cc"],
deps=[r],
commands=["{deps[0]} {ins} {outs} " + symbol],
label="PROTOENCODE",
)
@Rule
def protoencode(self, name, proto, srcs: TargetsMap, symbol):
encoded = [
protoencode_single(
name=f"{k}_cc",
srcs=[v],
proto=proto,
symbol=f"{symbol}_{k}_pb",
)
for k, v in srcs.items()
]
simplerule(
replaces=self,
ins=encoded,
outs=[f"={name}.cc"],
commands=["cat {ins} > {outs}"],
label="CONCAT",
)
cxxprogram(
name="mkdoc",
srcs=["./mkdoc.cc"],

View File

@@ -8,9 +8,9 @@ script="$4"
flags="$5"
dir="$6"
srcfile=$dir.$format.src.img
fluxfile=$dir.$format.$ext
destfile=$dir.$format.dest.img
srcfile=$dir/src.img
fluxfile=$dir/flux.$ext
destfile=$dir/dest.img
dd if=/dev/urandom of=$srcfile bs=1048576 count=2 2>&1
@@ -21,9 +21,11 @@ if [ ! -s $destfile ]; then
exit 1
fi
truncate $srcfile -r $destfile
truncate -r $destfile $srcfile
if ! cmp $srcfile $destfile; then
echo "Comparison failed!" >&2
echo "Run this to repeat:" >&2
echo "./scripts/encodedecodetest.sh \"$1\" \"$2\" \"$3\" \"$4\" \"$5\" \"$6\"" >&2
exit 1
fi
exit 0

View File

@@ -121,11 +121,12 @@ int main(int argc, const char* argv[])
}
auto data = message.SerializeAsString();
auto name = argv[3];
output << "#include \"lib/globals.h\"\n"
<< "#include \"lib/proto.h\"\n"
<< "#include <string_view>\n"
<< "static const uint8_t rawData[] = {";
<< "static const uint8_t " << name << "_rawData[] = {";
int count = 0;
for (char c : data)
@@ -140,12 +141,12 @@ int main(int argc, const char* argv[])
}
output << "\n};\n";
output << "extern const std::string_view " << argv[3] << "_data;\n";
output << "const std::string_view " << argv[3]
<< "_data = std::string_view((const char*)rawData, " << data.size()
output << "extern const std::string_view " << name << "_data;\n";
output << "const std::string_view " << name
<< "_data = std::string_view((const char*)" << name << "_rawData, " << data.size()
<< ");\n";
output << "extern const ConfigProto " << argv[3] << ";\n";
output << "const ConfigProto " << argv[3] << " = parseConfigBytes("
output << "extern const ConfigProto " << name << ";\n";
output << "const ConfigProto " << name << " = parseConfigBytes("
<< argv[3] << "_data);\n";
return 0;

View File

@@ -252,6 +252,7 @@ int mainAnalyseDriveResponse(int argc, const char* argv[])
if (globalConfig()->flux_sink().type() != FLUXTYPE_DRIVE)
error("this only makes sense with a real disk drive");
auto usb = USB::create();
usbSetDrive(globalConfig()->drive().drive(),
globalConfig()->drive().high_density(),
globalConfig()->drive().index_mode());

View File

@@ -7,6 +7,7 @@
#include "lib/csvreader.h"
#include "lib/image.h"
#include "lib/decoders/fluxmapreader.h"
#include "lib/usb/usb.h"
#include "agg2d.h"
#include "stb_image_write.h"
#include <math.h>
@@ -228,6 +229,7 @@ static Image readCsv(const std::string& filename)
int mainAnalyseLayout(int argc, const char* argv[])
{
flags.parseFlags(argc, argv);
auto usb = USB::create();
Image image = readCsv(source.get());
visualiseSectorsToFile(image, "out.svg");

View File

@@ -11,6 +11,7 @@
#include "fluxengine.h"
#include "lib/vfs/sectorinterface.h"
#include "lib/vfs/vfs.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -27,16 +28,10 @@ int mainFormat(int argc, const char* argv[])
showProfiles("format", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
filesystem->create(quick, volumeName);
filesystem->flushChanges();
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
auto usb = USB::create();
auto filesystem = Filesystem::createFilesystemFromConfig();
filesystem->create(quick, volumeName);
filesystem->flushChanges();
return 0;
}

View File

@@ -12,6 +12,7 @@
#include "lib/vfs/sectorinterface.h"
#include "lib/vfs/vfs.h"
#include "lib/utils.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -24,18 +25,12 @@ int mainGetDiskInfo(int argc, const char* argv[])
showProfiles("getdiskinfo", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
auto attributes = filesystem->getMetadata();
auto usb = USB::create();
auto filesystem = Filesystem::createFilesystemFromConfig();
auto attributes = filesystem->getMetadata();
for (const auto& e : attributes)
fmt::print("{}={}\n", e.first, quote(e.second));
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
for (const auto& e : attributes)
fmt::print("{}={}\n", e.first, quote(e.second));
return 0;
}

View File

@@ -11,6 +11,7 @@
#include "fluxengine.h"
#include "lib/vfs/sectorinterface.h"
#include "lib/vfs/vfs.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -26,24 +27,19 @@ int mainGetFile(int argc, const char* argv[])
showProfiles("getfile", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
Path inputFilename(directory);
if (inputFilename.size() == 0)
error("you must supply a filename to read");
auto usb = USB::create();
std::string outputFilename = output;
if (outputFilename.empty())
outputFilename = inputFilename.back();
Path inputFilename(directory);
if (inputFilename.size() == 0)
error("you must supply a filename to read");
auto filesystem = Filesystem::createFilesystemFromConfig();
auto data = filesystem->getFile(inputFilename);
data.writeToFile(outputFilename);
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
std::string outputFilename = output;
if (outputFilename.empty())
outputFilename = inputFilename.back();
auto filesystem = Filesystem::createFilesystemFromConfig();
auto data = filesystem->getFile(inputFilename);
data.writeToFile(outputFilename);
return 0;
}

View File

@@ -12,6 +12,7 @@
#include "lib/vfs/sectorinterface.h"
#include "lib/vfs/vfs.h"
#include "lib/utils.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -26,18 +27,13 @@ int mainGetFileInfo(int argc, const char* argv[])
showProfiles("getfileinfo", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
auto dirent = filesystem->getDirent(Path(directory));
auto usb = USB::create();
for (const auto& e : dirent->attributes)
fmt::print("{}={}\n", e.first, quote(e.second));
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
auto filesystem = Filesystem::createFilesystemFromConfig();
auto dirent = filesystem->getDirent(Path(directory));
for (const auto& e : dirent->attributes)
fmt::print("{}={}\n", e.first, quote(e.second));
return 0;
}

View File

@@ -10,6 +10,7 @@
#include "lib/decoders/rawbits.h"
#include "lib/sector.h"
#include "lib/proto.h"
#include "lib/usb/usb.h"
static FlagGroup flags;
@@ -133,6 +134,7 @@ int mainInspect(int argc, const char* argv[])
{
globalConfig().overrides()->mutable_flux_source()->set_type(FLUXTYPE_DRIVE);
flags.parseFlagsWithConfigFiles(argc, argv, {});
auto usb = USB::create();
auto& fluxSource = globalConfig().getFluxSource();
const auto fluxmap = fluxSource->readFlux(trackFlag, headFlag)->next();

View File

@@ -11,6 +11,7 @@
#include "lib/vfs/sectorinterface.h"
#include "lib/vfs/vfs.h"
#include "lib/utils.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -40,33 +41,28 @@ int mainLs(int argc, const char* argv[])
showProfiles("ls", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
auto files = filesystem->list(Path(directory));
auto usb = USB::create();
int maxlen = 0;
for (const auto& dirent : files)
maxlen = std::max(maxlen, (int)quote(dirent->filename).size());
auto filesystem = Filesystem::createFilesystemFromConfig();
auto files = filesystem->list(Path(directory));
uint32_t total = 0;
for (const auto& dirent : files)
{
fmt::print("{} {:{}} {:6} {:4} {}\n",
fileTypeChar(dirent->file_type),
quote(dirent->filename),
maxlen + 2,
dirent->length,
dirent->mode,
dirent->attributes[Filesystem::CTIME]);
total += dirent->length;
}
fmt::print("({} files, {} bytes)\n", files.size(), total);
}
catch (const FilesystemException& e)
int maxlen = 0;
for (const auto& dirent : files)
maxlen = std::max(maxlen, (int)quote(dirent->filename).size());
uint32_t total = 0;
for (const auto& dirent : files)
{
error("{}", e.message);
fmt::print("{} {:{}} {:6} {:4} {}\n",
fileTypeChar(dirent->file_type),
quote(dirent->filename),
maxlen + 2,
dirent->length,
dirent->mode,
dirent->attributes[Filesystem::CTIME]);
total += dirent->length;
}
fmt::print("({} files, {} bytes)\n", files.size(), total);
return 0;
}

View File

@@ -4,6 +4,7 @@
#include "fluxengine.h"
#include "lib/vfs/vfs.h"
#include "lib/utils.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -18,21 +19,15 @@ int mainMkDir(int argc, const char* argv[])
showProfiles("mkdir", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
auto usb = USB::create();
auto filesystem = Filesystem::createFilesystemFromConfig();
Path path(filename);
if (path.size() == 0)
error("filename missing");
Path path(filename);
if (path.size() == 0)
error("filename missing");
filesystem->createDirectory(path);
filesystem->flushChanges();
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
filesystem->createDirectory(path);
filesystem->flushChanges();
return 0;
}

View File

@@ -4,6 +4,7 @@
#include "fluxengine.h"
#include "lib/vfs/vfs.h"
#include "lib/utils.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -19,25 +20,19 @@ int mainMv(int argc, const char* argv[])
showProfiles("mv", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
auto usb = USB::create();
auto filesystem = Filesystem::createFilesystemFromConfig();
Path oldPath(oldFilename);
if (oldPath.size() == 0)
error("old filename missing");
Path oldPath(oldFilename);
if (oldPath.size() == 0)
error("old filename missing");
Path newPath(newFilename);
if (newPath.size() == 0)
error("new filename missing");
Path newPath(newFilename);
if (newPath.size() == 0)
error("new filename missing");
filesystem->moveFile(oldPath, newPath);
filesystem->flushChanges();
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
filesystem->moveFile(oldPath, newPath);
filesystem->flushChanges();
return 0;
}

View File

@@ -11,6 +11,7 @@
#include "fluxengine.h"
#include "lib/vfs/sectorinterface.h"
#include "lib/vfs/vfs.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -26,25 +27,19 @@ int mainPutFile(int argc, const char* argv[])
showProfiles("putfile", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
std::string inputFilename = input;
if (inputFilename.empty())
error("you must supply a local file to read from");
auto usb = USB::create();
std::string inputFilename = input;
if (inputFilename.empty())
error("you must supply a local file to read from");
Path outputFilename(path);
if (outputFilename.size() == 0)
error("you must supply a destination path to write to");
Path outputFilename(path);
if (outputFilename.size() == 0)
error("you must supply a destination path to write to");
auto data = Bytes::readFromFile(inputFilename);
auto filesystem = Filesystem::createFilesystemFromConfig();
filesystem->putFile(outputFilename, data);
filesystem->flushChanges();
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
auto data = Bytes::readFromFile(inputFilename);
auto filesystem = Filesystem::createFilesystemFromConfig();
filesystem->putFile(outputFilename, data);
filesystem->flushChanges();
return 0;
}

View File

@@ -8,6 +8,7 @@
#include "lib/fluxsink/fluxsink.h"
#include "lib/fluxsource/fluxsource.h"
#include "lib/imagewriter/imagewriter.h"
#include "lib/usb/usb.h"
#include "fluxengine.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -55,6 +56,7 @@ int mainRawRead(int argc, const char* argv[])
showProfiles("rawread", formats);
globalConfig().overrides()->mutable_flux_source()->set_type(FLUXTYPE_DRIVE);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
auto usb = USB::create();
if (globalConfig()->flux_sink().type() == FLUXTYPE_DRIVE)
error("you can't use rawread to write to hardware");

View File

@@ -6,6 +6,7 @@
#include "lib/proto.h"
#include "lib/fluxsource/fluxsource.h"
#include "lib/fluxsink/fluxsink.h"
#include "lib/usb/usb.h"
#include "fluxengine.h"
#include <fstream>
#include <ctype.h>
@@ -61,6 +62,7 @@ int mainRawWrite(int argc, const char* argv[])
showProfiles("rawwrite", formats);
globalConfig().overrides()->mutable_flux_sink()->set_type(FLUXTYPE_DRIVE);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
auto usb = USB::create();
if (globalConfig()->flux_source().type() == FLUXTYPE_DRIVE)
error("you can't use rawwrite to read from hardware");

View File

@@ -8,6 +8,7 @@
#include "lib/fluxsource/fluxsource.h"
#include "lib/fluxsink/fluxsink.h"
#include "lib/imagewriter/imagewriter.h"
#include "lib/usb/usb.h"
#include "fluxengine.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -60,6 +61,7 @@ int mainRead(int argc, const char* argv[])
showProfiles("read", formats);
globalConfig().set("flux_source.type", "FLUXTYPE_DRIVE");
flags.parseFlagsWithConfigFiles(argc, argv, formats);
auto usb = USB::create();
if (globalConfig()->decoder().copy_flux_to().type() == FLUXTYPE_DRIVE)
error("you cannot copy flux to a hardware device");

View File

@@ -4,6 +4,7 @@
#include "fluxengine.h"
#include "lib/vfs/vfs.h"
#include "lib/utils.h"
#include "lib/usb/usb.h"
#include "src/fileutils.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -18,21 +19,15 @@ int mainRm(int argc, const char* argv[])
showProfiles("rm", formats);
flags.parseFlagsWithConfigFiles(argc, argv, formats);
try
{
auto filesystem = Filesystem::createFilesystemFromConfig();
auto usb = USB::create();
auto filesystem = Filesystem::createFilesystemFromConfig();
Path path(filename);
if (path.size() == 0)
error("filename missing");
Path path(filename);
if (path.size() == 0)
error("filename missing");
filesystem->deleteFile(path);
filesystem->flushChanges();
}
catch (const FilesystemException& e)
{
error("{}", e.message);
}
filesystem->deleteFile(path);
filesystem->flushChanges();
return 0;
}

View File

@@ -23,6 +23,7 @@ int mainRpm(int argc, const char* argv[])
if (globalConfig()->flux_source().type() != FLUXTYPE_DRIVE)
error("this only makes sense with a real disk drive");
auto usb = USB::create();
usbSetDrive(globalConfig()->drive().drive(),
false,
globalConfig()->drive().index_mode());

View File

@@ -26,6 +26,7 @@ int mainSeek(int argc, const char* argv[])
if (globalConfig()->flux_source().type() != FLUXTYPE_DRIVE)
error("this only makes sense with a real disk drive");
auto usb = USB::create();
usbSetDrive(globalConfig()->drive().drive(),
false,
globalConfig()->drive().index_mode());

View File

@@ -7,6 +7,7 @@ static FlagGroup flags;
int mainTestBandwidth(int argc, const char* argv[])
{
flags.parseFlagsWithConfigFiles(argc, argv, {});
auto usb = USB::create();
usbTestBulkWrite();
usbTestBulkRead();
return 0;

View File

@@ -1,5 +1,6 @@
#include "lib/globals.h"
#include "lib/flags.h"
#include "lib/usb/usb.h"
#include "lib/usb/usbfinder.h"
#include <fmt/format.h>
@@ -8,6 +9,7 @@ static FlagGroup flags;
int mainTestDevices(int argc, const char* argv[])
{
flags.parseFlagsWithConfigFiles(argc, argv, {});
auto usb = USB::create();
auto candidates = findUsbDevices();
switch (candidates.size())

View File

@@ -15,6 +15,7 @@ static std::string display_voltages(struct voltages& v)
int mainTestVoltages(int argc, const char* argv[])
{
flags.parseFlagsWithConfigFiles(argc, argv, {});
auto usb = USB::create();
struct voltages_frame f;
usbMeasureVoltages(&f);

View File

@@ -11,6 +11,7 @@
#include "arch/brother/brother.h"
#include "arch/ibm/ibm.h"
#include "lib/imagereader/imagereader.h"
#include "lib/usb/usb.h"
#include "fluxengine.h"
#include <google/protobuf/text_format.h>
#include <fstream>
@@ -66,6 +67,7 @@ int mainWrite(int argc, const char* argv[])
globalConfig().setVerificationFluxSource("drive:0");
flags.parseFlagsWithConfigFiles(argc, argv, formats);
auto usb = USB::create();
auto& reader = globalConfig().getImageReader();
std::shared_ptr<Image> image = reader->readMappedImage();

View File

@@ -9,21 +9,21 @@ Brother word processor disks are weird, using custom tooling and chipsets.
They are completely not PC compatible in every possible way other than the
size.
Different word processors use different disk formats --- the only ones
supported by FluxEngine are the 120kB and 240kB 3.5" formats. The default
options are for the 240kB format. For the 120kB format, which is 40 track, do
`fluxengine read brother -s :t=1-79x2`.
Different word processors use different disk formats --- the only ones supported
by FluxEngine are the 120kB and 240kB 3.5" formats. Use the `--120` and `--240`
options to select which one.
Apparently about 20% of Brother word processors have alignment issues which
means that the disks can't be read by FluxEngine (because the tracks on the
disk don't line up with the position of the head in a PC drive). The word
processors themselves solved this by microstepping until they found where the
real track is, but normal PC drives aren't capable of doing this. Particularly
with the 120kB disks, you might want to fiddle with the start track (e.g.
`:t=0-79x2`) to get a clean read. Keep an eye on the bad sector map that's
dumped at the end of a read. My word processor likes to put logical track 0 on
physical track 3, which means that logical track 77 is on physical track 80;
luckily my PC drive can access track 80.
means that the disks can't be read by FluxEngine (because the tracks on the disk
don't line up with the position of the head in a PC drive). The word processors
themselves solved this by microstepping until they found where the real track
is, but normal PC drives aren't capable of doing this. Particularly with the
120kB disks, you might want to fiddle with the head bias (e.g.
`--drive.head_bias=3`) to get a clean read. Keep an eye on the bad sector map
that's dumped at the end of a read. My word processor likes to put logical track
0 on physical track 3, which means that logical track 77 is on physical track
80, so I need that `head_bias` value of 3; luckily my PC drive can access track
80.
Using FluxEngine to *write* disks isn't a problem, so the
simplest solution is to use FluxEngine to create a new disk, with the tracks
@@ -192,7 +192,7 @@ option_group {
}
drive {
head_bias: 3
head_bias: 0
}
filesystem {

View File

@@ -1,4 +1,4 @@
from build.ab import normalrule, export
from build.ab import simplerule, export
from build.c import cxxlibrary
from scripts.build import protoencode
@@ -34,46 +34,44 @@ formats = [
"rx50",
"shugart_drive",
"smaky6",
"tartu",
"tids990",
"tiki",
"victor9k",
"zilogmcz",
]
normalrule(
simplerule(
name="table_cc",
ins=[f"./{name}.textpb" for name in formats],
deps=["scripts/mktable.sh"],
outs=["table.cc"],
outs=["=table.cc"],
commands=[
"sh scripts/mktable.sh formats " + (" ".join(formats)) + " > {outs}"
],
label="MKTABLE",
)
encoded = [
protoencode(
name=f"{name}_cc",
srcs=[f"./{name}.textpb"],
proto="ConfigProto",
symbol=f"formats_{name}_pb",
)
for name in formats
]
protoencode(
name="formats_cc",
srcs={name: f"./{name}.textpb" for name in formats},
proto="ConfigProto",
symbol="formats",
)
cxxlibrary(
name="formats",
srcs=[".+table_cc"] + encoded,
srcs=[".+formats_cc", ".+table_cc"],
deps=["+lib", "lib+config_proto_lib"],
)
export(
name="docs",
items={
f"doc/disk-{f}.md": normalrule(
f"doc/disk-{f}.md": simplerule(
name=f"{f}_doc",
ins=["scripts+mkdoc"],
outs=[f"disk-{f}.md"],
outs=[f"=disk-{f}.md"],
commands=["{ins[0]} " + f + " | tr -d '\\r' > {outs[0]}"],
label="MKDOC",
)

105
src/formats/tartu.textpb Normal file
View File

@@ -0,0 +1,105 @@
shortname: 'Tartu'
comment: 'The Palivere and variations'
read_support_status: UNICORN
write_support_status: DINOSAUR
documentation:
<<<
The Tartu Palivere is a 1988 8080-based computer from Estonia. It is a CP/M
machine with 64kB of RAM, running off a 2MHz КР580ВМ80А, a Soviet clone of the
Intel 8080; it operated off punched tape, cassette, external hard drive or
floppy, and was notable as being the first ever computer with an Estonian
keyboard.
<div style="text-align: center">
<img src="tartu.jpg" alt="The Tartu computer's developer Leo Humal working with one."/>
</div>
From a floppy disk perspective, it is interesting because the floppy drive
interface is almost entirely handled in software --- necessary because the
usual floppy disk interface chip of the era, the КР1818ВГ93 (a Soviet clone
of the WD1793), was hard to find. Instead, the floppy controller board was
implemented entirely using TTL logic. Despite this, the encoding is fairly high
density, using MFM and with up to 780kB on a double-sided 80 track disk.
<div style="text-align: center">
<img src="tartu-fdc.jpg" alt="The Tartu FDC with Soviet TTL logic chips."/>
</div>
FluxEngine supports reading and writing Tartu disks with CP/M filesystem access.
>>>
documentation:
<<<
## References
- [The Estonia Museum of Electronics](https://www.elektroonikamuuseum.ee/tartu_arvuti_lugu.html)
>>>
image_writer {
filename: "tartu.img"
type: IMAGETYPE_IMG
}
layout {
layoutdata {
sector_size: 128
physical {
start_sector: 1
count: 39
}
filesystem {
start_sector: 1
count: 39
skew: 3
}
}
}
encoder {
tartu {}
}
decoder {
tartu {}
}
option_group {
comment: "$formats"
option {
name: "390"
comment: '390kB 5.25" 40-track DSDD'
config {
layout {
format_type: FORMATTYPE_40TRACK
tracks: 40
sides: 2
}
}
}
option {
name: "780"
comment: '780kB 5.25" 80-track DSDD'
config {
layout {
format_type: FORMATTYPE_80TRACK
tracks: 80
sides: 2
}
}
}
}
filesystem {
type: CPMFS
cpmfs {
filesystem_start {
track: 1
}
block_size: 2048
dir_entries: 128
}
}

View File

@@ -1,4 +1,4 @@
from build.ab import emit, normalrule
from build.ab import emit, simplerule
from build.c import cxxprogram
import config
@@ -18,10 +18,10 @@ endif
extrasrcs = []
if config.windows:
extrasrcs += [
normalrule(
simplerule(
name="rc",
ins=["./windres.rc"],
outs=["rc.o"],
outs=["=rc.o"],
deps=["./manifest.xml", "extras+fluxengine_ico"],
commands=["$(WINDRES) {ins[0]} {outs[0]}"],
label="WINDRES",
@@ -72,24 +72,24 @@ cxxprogram(
)
if config.osx:
normalrule(
simplerule(
name="fluxengine_pkg",
ins=[".+fluxengine_app"],
outs=["FluxEngine.pkg"],
outs=["=FluxEngine.pkg"],
commands=[
"pkgbuild --quiet --install-location /Applications --component {ins[0]} {outs[0]}"
],
label="PKGBUILD",
)
normalrule(
simplerule(
name="fluxengine_app",
ins=[
".+gui",
"extras+fluxengine_icns",
"extras/FluxEngine.app.template/",
],
outs=["FluxEngine.app"],
outs=["=FluxEngine.app"],
commands=[
"rm -rf {outs[0]}",
"cp -a {ins[2]} {outs[0]}",

View File

@@ -1,4 +1,4 @@
from build.ab import normalrule
from build.ab import simplerule
from build.c import cxxlibrary
from scripts.build import protoencode
@@ -8,11 +8,11 @@ drivetypes = [
"apple2",
]
normalrule(
name="drivetypes_cc",
simplerule(
name="drivetypes_table_cc",
ins=[f"./{name}.textpb" for name in drivetypes],
deps=["scripts/mktable.sh"],
outs=["table.cc"],
outs=["=table.cc"],
commands=[
"sh scripts/mktable.sh drivetypes "
+ (" ".join(drivetypes))
@@ -21,14 +21,16 @@ normalrule(
label="MKTABLE",
)
encoded = [
protoencode(
name=f"{name}_cc",
srcs=[f"./{name}.textpb"],
proto="ConfigProto",
symbol=f"drivetypes_{name}_pb",
)
for name in drivetypes
]
cxxlibrary(name="drivetypes", srcs=[".+drivetypes_cc"] + encoded, deps=["+lib"])
protoencode(
name="drivetypes_cc",
srcs={name: f"./{name}.textpb" for name in drivetypes},
proto="ConfigProto",
symbol="drivetypes",
)
cxxlibrary(
name="drivetypes",
srcs=[".+drivetypes_cc", ".+drivetypes_table_cc"],
deps=["+lib"],
)

View File

@@ -10,6 +10,7 @@
#include "lib/imagereader/imagereader.h"
#include "lib/imagewriter/imagewriter.h"
#include "lib/layout.h"
#include "lib/usb/usb.h"
#include "texteditorwindow.h"
#include "iconbutton.h"
#include <wx/config.h>
@@ -291,6 +292,11 @@ public:
globalConfig().validateAndThrow();
ClearLog();
/* Ensure the USB device is opened. */
_usb.reset();
_usb = USB::create();
}
const wxBitmap GetBitmap() override
@@ -809,6 +815,7 @@ private:
std::string _extraConfiguration;
std::set<std::pair<std::string, std::string>> _formatOptions;
int _currentlyDisplayedFormat = wxNOT_FOUND - 1;
std::unique_ptr<USB> _usb;
};
IdlePanel* IdlePanel::Create(MainWindow* mainWindow, wxSimplebook* parent)

View File

@@ -2,14 +2,12 @@ from build.ab import export
from build.c import cxxprogram
from build.protobuf import proto, protocc
from build.utils import test
from scripts.build import protoencode
from scripts.build import protoencode_single
proto(
name="test_proto",
srcs=[
"./testproto.proto",
],
srcs=["./testproto.proto"],
)
protocc(
@@ -49,7 +47,7 @@ export(
name="proto_test_exe",
srcs=[
"./proto.cc",
protoencode(
protoencode_single(
name="testproto_cc",
srcs=["./testproto.textpb"],
proto="TestProto",

View File

@@ -37,14 +37,50 @@ namespace
};
}
namespace snowhouse
{
    /* Teach snowhouse how to render a std::vector<bool> in assertion
     * failure messages, e.g. "{1, 0, 1}". */
    template <>
    struct Stringizer<std::vector<bool>>
    {
        static std::string ToString(const std::vector<bool>& values)
        {
            std::stringstream out;
            out << '{';
            for (std::size_t i = 0; i < values.size(); i++)
            {
                if (i != 0)
                    out << ", ";
                out << values[i];
            }
            out << '}';
            return out.str();
        }
    };

    /* Teach snowhouse how to render Bytes in assertion failure messages,
     * as a hex dump on its own lines. */
    template <>
    struct Stringizer<Bytes>
    {
        static std::string ToString(const Bytes& data)
        {
            std::stringstream out;
            out << '\n';
            hexdump(out, data);
            return out.str();
        }
    };
}
static Bytes createDirent(const std::string& filename,
int extent,
int records,
const std::initializer_list<int> blocks)
const std::initializer_list<int> blocks,
int user = 0)
{
Bytes dirent;
ByteWriter bw(dirent);
bw.write_8(0);
bw.write_8(user);
bw.append(filename);
while (bw.pos != 12)
bw.write_8(' ');
@@ -69,6 +105,21 @@ static void setBlock(
sectors->put(block, 0, i)->data = data.slice(i * 256, 256);
}
/* Reads back the contents of a filesystem block by concatenating its
 * sectors, returning at least `length` bytes.
 * (Assumes 128-byte records when computing the sector count --- TODO
 * confirm this matches the configured sector size.) */
static Bytes getBlock(
    const std::shared_ptr<SectorInterface>& sectors, int block, int length)
{
    Bytes result;
    ByteWriter writer(result);
    const int records = (length + 127) / 128;
    for (int record = 0; record < records; record++)
        writer.append(sectors->get(block, 0, record)->data);
    return result;
}
static void testPartialExtent()
{
auto sectors = std::make_shared<TestSectorInterface>();
@@ -113,6 +164,143 @@ static void testLogicalExtents()
AssertThat(data[0x4000 * 2], Equals(3));
}
/* Verifies that mounting a CP/M filesystem derives the correct block
 * allocation bitmap from the directory's extents. */
static void testBitmap()
{
    auto sectors = std::make_shared<TestSectorInterface>();
    auto fs = Filesystem::createCpmFsFilesystem(
        globalConfig()->filesystem(), sectors);

    /* One file with two extents, occupying blocks 1, 2 and 4; the
     * directory itself lives in block 0. */
    setBlock(sectors,
        0,
        createDirent("FILE", 1, 128, {1, 0, 0, 0, 0, 0, 0, 0, 2}) +
            createDirent("FILE", 2, 128, {4}) + (blank_dirent * 62));

    dynamic_cast<HasMount*>(fs.get())->mount();

    std::vector<bool> allocationMap =
        dynamic_cast<HasBitmap*>(fs.get())->getBitmapForDebugging();
    AssertThat(allocationMap,
        Equals(std::vector<bool>{
            1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}));
}
#if 0
/* Disabled (inside #if 0): round-trips two small files through putFile and
 * checks the resulting allocation bitmap, directory entries and on-disk
 * file data. */
static void testPutGet()
{
auto sectors = std::make_shared<TestSectorInterface>();
auto fs = Filesystem::createCpmFsFilesystem(
globalConfig()->filesystem(), sectors);
fs->create(true, "volume");
fs->putFile(Path("0:FILE1"), Bytes{1, 2, 3, 4});
fs->putFile(Path("0:FILE2"), Bytes{5, 6, 7, 8});
/* Remount so the bitmap is recomputed from what was actually written. */
dynamic_cast<HasMount*>(fs.get())->mount();
std::vector<bool> bitmap =
dynamic_cast<HasBitmap*>(fs.get())->getBitmapForDebugging();
/* Blocks 0 (directory), 1 and 2 (one block per file) should be in use. */
AssertThat(bitmap,
Equals(std::vector<bool>{
1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}));
/* The directory should contain exactly one extent per file. */
auto directory = getBlock(sectors, 0, 256).slice(0, 64);
AssertThat(directory,
Equals(createDirent("FILE1", 0, 1, {1}) +
createDirent("FILE2", 0, 1, {2})));
/* File data is padded with zeroes to the record boundary. */
auto file1 = getBlock(sectors, 1, 8).slice(0, 8);
AssertThat(file1, Equals(Bytes{1, 2, 3, 4, 0, 0, 0, 0}));
auto file2 = getBlock(sectors, 2, 8).slice(0, 8);
AssertThat(file2, Equals(Bytes{5, 6, 7, 8, 0, 0, 0, 0}));
}
/* Disabled (inside #if 0): writes a 0x9000-byte file and checks that it is
 * split across two directory extents (the first full with 16 blocks, the
 * second partial). */
static void testPutBigFile()
{
auto sectors = std::make_shared<TestSectorInterface>();
auto fs = Filesystem::createCpmFsFilesystem(
globalConfig()->filesystem(), sectors);
fs->create(true, "volume");
/* Fill the file with its own little-endian offsets so content is
 * position-dependent. */
Bytes filedata;
ByteWriter bw(filedata);
while (filedata.size() < 0x9000)
bw.write_le32(bw.pos);
fs->putFile(Path("0:BIGFILE"), filedata);
auto directory = getBlock(sectors, 0, 256).slice(0, 64);
/* Extent 0 holds 0x80 records in blocks 1..16; extent 2 holds the
 * remaining 0x20 records in blocks 17..18. */
AssertThat(directory,
Equals(createDirent("BIGFILE",
0,
0x80,
{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) +
createDirent("BIGFILE", 2, 0x20, {17, 18})));
}
/* Disabled (inside #if 0): deletes one of two files and checks that its
 * directory entry is wiped with 0xe5 (the CP/M deleted-entry marker) while
 * the other entry survives. */
static void testDelete()
{
auto sectors = std::make_shared<TestSectorInterface>();
auto fs = Filesystem::createCpmFsFilesystem(
globalConfig()->filesystem(), sectors);
fs->create(true, "volume");
fs->putFile(Path("0:FILE1"), Bytes{1, 2, 3, 4});
fs->putFile(Path("0:FILE2"), Bytes{5, 6, 7, 8});
fs->deleteFile(Path("0:FILE1"));
auto directory = getBlock(sectors, 0, 256).slice(0, 64);
AssertThat(directory,
Equals((Bytes{0xe5} * 32) + createDirent("FILE2", 0, 1, {2})));
}
/* Disabled (inside #if 0): renames a multi-extent file across user areas
 * (0:FILE1 -> 1:FILE3) and checks that every extent is renamed and given
 * the new user number, without touching the other file. */
static void testMove()
{
auto sectors = std::make_shared<TestSectorInterface>();
auto fs = Filesystem::createCpmFsFilesystem(
globalConfig()->filesystem(), sectors);
fs->create(true, "volume");
/* 0x9000 bytes forces FILE1 to span two extents. */
fs->putFile(Path("0:FILE1"), Bytes{0x55} * 0x9000);
fs->putFile(Path("0:FILE2"), Bytes{5, 6, 7, 8});
fs->moveFile(Path("0:FILE1"), Path("1:FILE3"));
auto directory = getBlock(sectors, 0, 256).slice(0, 32 * 3);
/* Both FILE3 extents carry user number 1 (the trailing argument to
 * createDirent); FILE2 stays in user 0. */
AssertThat(directory,
Equals(createDirent("FILE3",
0,
0x80,
{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16},
1) +
createDirent("FILE3", 2, 0x20, {17, 18}, 1) +
createDirent("FILE2", 0, 1, {19})));
}
/* Disabled (inside #if 0): sets the mode metadata ("SRA") on a file and
 * checks the attribute bits appear in every extent of that file. The
 * expected 0xa0 bytes are ' ' with the high bit set --- presumably the
 * CP/M convention of storing attribute flags in the high bits of the
 * extension characters; TODO confirm against the putMetadata
 * implementation. */
static void testPutMetadata()
{
auto sectors = std::make_shared<TestSectorInterface>();
auto fs = Filesystem::createCpmFsFilesystem(
globalConfig()->filesystem(), sectors);
fs->create(true, "volume");
/* 0x9000 bytes forces FILE1 to span two extents, both of which must
 * receive the attribute bits. */
fs->putFile(Path("0:FILE1"), Bytes{0x55} * 0x9000);
fs->putFile(Path("0:FILE2"), Bytes{5, 6, 7, 8});
fs->putMetadata(Path("0:FILE1"),
std::map<std::string, std::string>{
{"mode", "SRA"}
});
auto directory = getBlock(sectors, 0, 256).slice(0, 32 * 3);
AssertThat(directory,
Equals(createDirent("FILE1 \xa0\xa0\xa0",
0,
0x80,
{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) +
createDirent("FILE1 \xa0\xa0\xa0", 2, 0x20, {17, 18}) +
createDirent("FILE2", 0, 1, {19})));
}
#endif
int main(void)
{
try
@@ -124,7 +312,7 @@ int main(void)
layout {
format_type: FORMATTYPE_80TRACK
tracks: 10
tracks: 20
sides: 1
layoutdata {
sector_size: 256
@@ -148,6 +336,14 @@ int main(void)
testPartialExtent();
testLogicalExtents();
#if 0
testBitmap();
testPutGet();
testPutBigFile();
testDelete();
testMove();
testPutMetadata();
#endif
}
catch (const ErrorException& e)
{