From 13f1518a4f33ba3805a91cb6cfe03c57cd188224 Mon Sep 17 00:00:00 2001
From: Scott Alfter
Date: Fri, 25 Oct 2024 13:19:24 -0700
Subject: [PATCH] local configuration

---
 .gitignore          |  3 ++-
 docker-compose.yaml | 43 +++++++++++++++++++++++++++++++------------
 htpasswd            |  1 +
 nginx-conf          |  1 +
 requirements.txt    |  2 +-
 5 files changed, 36 insertions(+), 14 deletions(-)
 create mode 120000 htpasswd
 create mode 120000 nginx-conf

diff --git a/.gitignore b/.gitignore
index 3af0e72..4126f07 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,5 @@ modules/__pycache__/
 models/
 modules/yt_tmp.wav
 configs/default_parameters.yaml
-__pycache__/
\ No newline at end of file
+__pycache__/*~
+*~
diff --git a/docker-compose.yaml b/docker-compose.yaml
index ae1b532..566b074 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,17 +1,26 @@
 services:
-  app:
+  whisper:
+    container_name: whisper
+    networks:
+      - www
     build: .
     image: jhj0517/whisper-webui:latest
+#    labels:
+#      caddy: whisper.alfter.us
+##      caddy.basic_auth.salfter: $2a$14$IhrjNNbvLmLJRM0BSiV4mOikVZ68rsL6u9Uz2qxUdI6tFRo6f.K/C
+#      caddy.reverse_proxy: whisper.www:7860
+
+
 
     volumes:
       # Update paths to mount models and output paths to your custom paths like this, e.g:
       # - C:/whisper-models/custom-path:/Whisper-WebUI/models
       # - C:/whisper-webui-outputs/custom-path:/Whisper-WebUI/outputs
-      - /Whisper-WebUI/models
-      - /Whisper-WebUI/outputs
+      - "whisper-models:/Whisper-WebUI/models"
+      - "/mnt/storage/documents/whisper:/Whisper-WebUI/outputs"
 
-    ports:
-      - "7860:7860"
+#    ports:
+#      - "7860:7860"
 
     stdin_open: true
     tty: true
@@ -20,10 +29,20 @@ services:
 
     # If you're not using nvidia GPU, Update device to match yours.
     # See more info at : https://docs.docker.com/compose/compose-file/deploy/#driver
-    deploy:
-      resources:
-        reservations:
-          devices:
-            - driver: nvidia
-              count: all
-              capabilities: [ gpu ]
\ No newline at end of file
+#    deploy:
+#      resources:
+#        reservations:
+#          devices:
+#            - driver: nvidia
+#              count: all
+#              capabilities: [ gpu ]
+
+networks:
+  www:
+    name: www
+    external: true
+
+volumes:
+  whisper-models:
+    name: whisper-models
+    external: true
diff --git a/htpasswd b/htpasswd
new file mode 120000
index 0000000..9e9fa38
--- /dev/null
+++ b/htpasswd
@@ -0,0 +1 @@
+../nginx/conf.d/whisper.htpasswd
\ No newline at end of file
diff --git a/nginx-conf b/nginx-conf
new file mode 120000
index 0000000..2181335
--- /dev/null
+++ b/nginx-conf
@@ -0,0 +1 @@
+../nginx/conf.d/whisper.conf
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index a52ddc8..4e10e27 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@
 # If you're using it, update url to your CUDA version (CUDA 12.1 is minimum requirement):
 # For CUDA 12.1, use : https://download.pytorch.org/whl/cu121
 # For CUDA 12.4, use : https://download.pytorch.org/whl/cu124
---extra-index-url https://download.pytorch.org/whl/cu124
+#--extra-index-url https://download.pytorch.org/whl/cu124
 torch
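
Note: the patched compose file attaches the container to an external "www" network (presumably shared with the nginx/Caddy reverse proxy) and keeps downloaded models in an external "whisper-models" volume; both are declared external: true, so they must already exist before docker compose up is run. A minimal way to create them, assuming the default bridge and local drivers, is:

    docker network create www
    docker volume create whisper-models

The outputs bind mount (/mnt/storage/documents/whisper) and the symlink targets under ../nginx/conf.d/ are likewise site-specific paths that are expected to exist on the host.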