Compare commits: 2c53a7fe3a ... btc
19 Commits

SHA1:
9ef90db07b
458506e798
874c759f85
237b07e76c
3d5a5afe8d
53007a1c1b
d9db074e62
84549cb8d1
b3129bcb53
3451147e66
f26e1c518b
c1ae312ea3
260a40ceb2
9923a6a9e8
8b9577920a
2a7b7ba7e9
1f5685d93d
7518ed646c
1f15393381
@@ -7,6 +7,7 @@ docker_compose_directory_mask: 0775
docker_compose_file_mask: 0664
install_directory: /srv

vpn_listen_port: 61383

sonarr_api_key: !vault |
$ANSIBLE_VAULT;1.1;AES256
@@ -26,4 +27,26 @@ radarr_api_key: !vault |
66613735373430636437343135303238663534316632363534323130313934306536633862303632
6431656565666466303837336430613062346331646432306164

vpn_listen_port: 61383
slskd_api_key: !vault |
$ANSIBLE_VAULT;1.1;AES256
39666166376666636338643866636262356665363135386336343030326539363163333963626163
6666313861613930663433623933343439366562356563340a663639363566336435376436303762
65613536356139356364653432643736366436333139666165383736323834656130393036306339
3065646663366136320a383936643239343039656334336236623232313035633664303434383030
66626663303961333936306661623339333634653433633035633734373261366231333365383062
33633863366639326236373234326264616136656463353164353730376366373939643061306239
36353234333363386563643066313861343633376236333162313737366536613066303633363566
36633636653261343431356230666234626162333630333536633233313036326661373836333766
39343331396465323261393563306631666632343330306637636365373963613430633236656539
63663665386334306235336262656438396336633037363534666438373534623333333061646130
653230663539386239396337306130373531

lidarr_api_key: !vault |
$ANSIBLE_VAULT;1.1;AES256
64336163343037336539353364636235383462643538396534636630323266333730353636366235
3938396634613464613639356466616365353738633433320a323333333339316332613266373939
33613133653436316230623634323066383535653961386465643334396166353064666135343030
3638613966616630390a613261336531343934393634316263303632353038643634663236666162
62653835383739653661363962303862613338333865363430383335643635353037656537323034
6164633230363736306565646233333466313362653738303065
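(Note: vaulted values like the ones above are normally generated with ansible-vault rather than written by hand; an illustrative command with a placeholder secret would be:
    ansible-vault encrypt_string 'changeme' --name 'slskd_api_key'
and the resulting block is pasted into the vars file.)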
@@ -1,37 +1,3 @@
- hosts: pve-docker
  roles:
    - pve-docker
    - docker-compose
    - traefik
    - tautulli
    - searxng
    - prowlarr
    - sonarr
    - radarr
    - firefly3
    - overseerr
    - ntfy
    - gluetun
    - autobrr
    - name: qbittorrent
      tags: test
    - nextcloud
    - redlib
    - readarr
    - calibre
    - calibre-web
    - deemix
    - barassistant
    - mealie
    - recyclarr
    - firefly3
    - sabnzbd
    - unifi-controller
    - slskd
    - navidrome
    - lidarr
    - lubelogger
    - searxng
    - synapse
    - rimgo
    - gitea
    - qbittorrent
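(Note: with the qbittorrent entry tagged "test", a single role can be run in isolation; an illustrative invocation, assuming the playbook file is named site.yml, would be:
    ansible-playbook site.yml --tags test)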
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,41 +1,36 @@
plugins: fetchart embedart scrub replaygain lastgenre chroma web inline
directory: /music
library: /config/musiclibrary.blb
art_filename: cover
threaded: yes
original_date: no
per_disc_numbering: yes

plugins: fetchart web inline lyrics

paths:
  default: $albumartist/$album%aunique{}/%if{$multidisc,$disc}$track - $title
  singleton: Non-Album/$artist - $title
  comp: Various Artists/$album%aunique{}/%if{$multidisc,$disc}$track - $title
  albumtype_soundtrack: Soundtracks/$album/$track $title
item_fields:
  multidisc: 1 if disctotal > 1 else 0 # Makes it so track number is preceded by disc number only if album contains multiple discs

import:
  write: yes
  copy: no
  move: yes
  hardlink: yes
  resume: ask
  incremental: yes
  quiet_fallback: skip
  timid: no
  log: /config/beet.log
  languages: "en jp es"

lastgenre:
  auto: yes
  source: album
asciify_paths: yes

embedart:
  auto: yes

fetchart:
  auto: yes
  high_resolution: yes

replaygain:
  auto: no
lyrics:
  sources: [lrclib, genius]

scrub:
  auto: yes
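(Note: with this config mounted at /config, an illustrative manual run of beets against the library, assuming the beet CLI is available in the container, would be:
    beet -c /config/config.yaml import /music)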
@@ -1,5 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
    external: true

ansible/roles/btc/tasks/bitcoin.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
- name: Install bitcoin daemon
  ansible.builtin.package:
    name: bitcoin-daemon
    state: present
  become: true

- name: Enable bitcoind
  ansible.builtin.service:
    name: bitcoind
    state: started
    enabled: yes
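(Note: once these tasks have run, the daemon can be checked on the host with, for example:
    systemctl status bitcoind
    bitcoin-cli getblockchaininfo)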

ansible/roles/btc/tasks/tor.yml (new file, 6 lines)
@@ -0,0 +1,6 @@
- name: Install tor
  ansible.builtin.package:
    name: tor
    state: present

ansible/roles/btc/tasks/ufw.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
- name: Install Uncomplicated Firewall
  ansible.builtin.package:
    name: ufw
    state: present

# UFW logging can fill up the kernel (dmesg) and message logs
- name: Disable logging
  community.general.ufw:
    logging: 'off'

- name: Allow OpenSSH inbound
  community.general.ufw:
    rule: allow
    name: OpenSSH # Uses standard profile located in /etc/ufw/applications.d

- name: Apply rate limiting to ssh inbound
  community.general.ufw:
    rule: limit
    port: ssh
    proto: tcp

- name: Enable ufw system service
  ansible.builtin.service:
    name: ufw
    state: started
    enabled: yes

# This is necessary in addition to enabling the system service
- name: Enable ufw rules
  community.general.ufw:
    state: enabled
    policy: deny
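(Note: the resulting rule set can be inspected on the host with:
    ufw status verbose)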
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  firefly_iii:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,10 +1,3 @@
- name: Create service user
  user:
    name: "{{ role_name }}"
    system: true
  register: service_user
  become: true

- name: Create install directory
  file:
    path: "{{ install_directory }}/{{ role_name }}"
@@ -1,5 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
    external: true
@@ -7,16 +5,21 @@ networks:
services:
  {{ role_name }}:
    container_name: "{{ role_name }}"
    image: lscr.io/linuxserver/overseerr:latest
    image: fallenbagel/jellyseerr:latest
    restart: unless-stopped
    networks:
      - traefik
    environment:
      - "PUID={{ service_user.uid }}"
      - "PGID={{ service_user.uid }}"
      - LOG_LEVEL=debug
      - "TZ={{ timezone }}"
    volumes:
      - "{{ data_dir }}/{{ role_name }}:/config"
      - "{{ data_dir }}/{{ role_name }}:/app/config"
    healthcheck:
      test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
      start_period: 20s
      timeout: 3s
      interval: 15s
      retries: 3
    labels:
      traefik.enable: true
      traefik.http.routers.{{ role_name }}.rule: "Host(`requests.{{ personal_domain }}`)"

ansible/roles/lidarr/files/scripts_init.bash (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/with-contenv bash
set -euo pipefail

curl -sfL https://raw.githubusercontent.com/RandomNinjaAtk/arr-scripts/main/lidarr/setup.bash | bash
exit
@@ -24,6 +24,32 @@
    validate: docker compose -f %s config
  become: true

- name: Create data directory
  ansible.builtin.file:
    path: "{{ data_dir }}/{{ item }}"
    state: directory
    owner: "{{ service_user.uid }}"
    group: "{{ primary_gid }}"
    mode: "{{ docker_compose_directory_mask }}"
  become: true
  loop:
    - "{{ role_name }}/config"
    - "{{ role_name }}/custom-services"
    - "{{ role_name }}/custom-cont"

- name: Copy arr-scripts init script
  ansible.builtin.copy:
    src: "scripts_init.bash"
    dest: "{{ data_dir }}/{{ role_name }}/custom-cont"
    owner: "{{ service_user.uid }}"
    mode: 0564
  become: true

- name: Copy extended.conf
  ansible.builtin.template:
    src: "extended.conf"
    dest: "{{ data_dir }}/{{ role_name }}/config"

- name: Start docker container
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/{{ role_name }}"
@@ -1,5 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
    external: true
@@ -17,8 +15,11 @@ services:
      - "TZ={{ timezone }}"
      - "UMASK=002"
    volumes:
      - "{{ data_dir }}/{{ role_name }}:/config"
      - "{{ data_dir }}/{{ role_name }}/config:/config"
      - "{{ media_storage_mnt }}/data:/data"
      # arr-scripts - https://github.com/RandomNinjaAtk/arr-scripts/blob/main/lidarr/readme.md
      - "{{ data_dir }}/{{ role_name }}/custom-services:/custom-services.d"
      - "{{ data_dir }}/{{ role_name }}/custom-cont:/custom-cont-init.d"
    labels:
      traefik.enable: true
      traefik.http.routers.{{ role_name }}.rule: "Host(`{{ role_name }}.local.{{ personal_domain }}`)"

ansible/roles/lidarr/templates/extended.conf (new file, 89 lines)
@@ -0,0 +1,89 @@
##### LIDARR EXTENDED SCRIPTS SETTINGS #####

##### PATHS
downloadPath="/config/extended/downloads" # OPTIONAL, only change if needed.... Temporary Download Location for incoming Audio files from included clients
importPath="/config/extended/import" # OPTIONAL, only change if needed.... Completed downloads are stored here for the Usenet Blackhole client to import from. When Using queue cleaner, failed imports will be automatically cleaned up.
videoPath="" # Final destination/location of completed Videos

##### SCRIPT ENABLEMENT
enableAutoConfig="true" # true = enabled :: Automatically configures Lidarr with optimal settings
enableAudio="true" # true = enabled :: Enables the Audio script to run automatically
enableVideo="false" # true = enabled :: Enables the Video script to run automatically
enableUnmappedFilesCleaner="false" # true = enabled :: Enables the UnmappedFilesCleaner script to run automatically
enableQueueCleaner="true" # true = enabled :: Enables the QueueCleaner script to run automatically

##### SCRIPT INTERVALS
audioScriptInterval=15m #s or #m or #h or #d :: s = seconds, m = minutes, h = hours, d = days :: Amount of time between each script run, when script is enabled
videoScriptInterval=15m #s or #m or #h or #d :: s = seconds, m = minutes, h = hours, d = days :: Amount of time between each script run, when script is enabled
autoArtistAdderInterval=12h #s or #m or #h or #d :: s = seconds, m = minutes, h = hours, d = days :: Amount of time between each script run, when script is enabled
unmappedFolderCleanerScriptInterval=15m #s or #m or #h or #d :: s = seconds, m = minutes, h = hours, d = days :: Amount of time between each script run, when script is enabled
queueCleanerScriptInterval=15m #s or #m or #h or #d :: s = seconds, m = minutes, h = hours, d = days :: Amount of time between each script run, when script is enabled

##### AUTOCONFIG SCRIPT SETTINGS
configureMediaManagement="true" # Configures Lidarr Media Management Settings
configureMetadataConsumerSettings="false" # Configures Lidarr Metadata Consumer Settings
configureMetadataProviderSettings="false" # Configures Lidarr Metadata Provider Settings
configureCustomScripts="true" # Adds custom scripts to Lidarr
configureLidarrUiSettings="true" # Configures Lidarr UI Settings
configureMetadataProfileSettings="false" # Configures Lidarr Metadata Profile Settings
configureTrackNamingSettings="true" # Configures Lidarr Track Naming Settings

##### AUDIO SCRIPT
# SEARCH
searchSort="date" # date or album :: Sorts the missing/cutoff list by release date (newest -> oldest) or album type (album -> single) for processing the list
matchDistance="3" # The number of changes required to transform the searched album title into a possible found album title match... (0, requires a perfect match)
lidarrSearchForMissing="false" # true = enabled :: When artists are added, search for them using Lidarr's built in functionality
retryNotFound="90" ## :: Number of days between re-attempting the download of previously notfound albums
ignoreInstrumentalRelease="false" # true = enabled :: Skips album releases that have the word "instrumental" in either the title or disambiguation
preferSpecialEditions="true" # true = enabled :: This changes the release title sort order, when true, it will search for releases with longest release names first, which are more likely to be deluxe/special versions...

# CLIENT
dlClientSource="deezer" # deezer, tidal or both :: set to both, to use both clients, tidal requires extra steps, view logging output
arlToken="{{ deezer_arl }}" # OPTIONAL (fallback using Freyr) - User ARL token for deemix client
tidalCountryCode="US" # Country Code required for tidal
failedDownloadAttemptThreshold="6" # Maximum number of attempts to download a particular album ID from the service before considering it a failed download...
deezerClientTestDownloadId="197472472" # A known good deezer track id that is used to test/verify the client is functioning in the event all download attempts have failed. Prevents the script from exiting...
tidalClientTestDownloadId="166356219" # A known good tidal track id that is used to test/verify the client is functioning in the event all download attempts have failed. Prevents the script from exiting...
downloadClientTimeOut="10m" # 's' for seconds (the default), 'm' for minutes, 'h' for hours or 'd' for days :: Kills freyr download client after the set timeout period, prevents lock ups...

# QUALITY
audioLyricType="both" # both or explicit or clean :: both, is explicit preferred matching, explicit is explicit releases only matching and clean is clean releases only matching
audioFormat="native" # native or alac or mp3 or aac or opus :: native is the native download client file type, selected by the matching audio bit-rate
audioBitrate="lossless" # master or lossless or high or low or ### :: master = MQA/lossless flac files, lossless = flac files, high = 320K, low = 128k/96k, ### = the output bit-rate of converted lossless files to selected audioFormat that is not native, example: 192...
requireQuality="true" # true = enabled :: Downloads will be checked for quality and require to have the requested file format & quality

# POST PROCESSING
enableBeetsTagging="true" # true = enabled :: Downloads will be processed and tagged by Beets
beetsMatchPercentage="90" # 1-100 :: Set this to the minimum percentage required for Beets to match the downloaded album to a musicbrainz release :: Lower percentage is less restrictive
enableReplaygainTags="true" # true = enabled :: Downloads will be tagged with ReplayGain Metadata

# ADD ARTIST AUTOMATION
addDeezerTopArtists="false" # true = enabled :: Enabling this will enable the extended script to automatically add artists that are on the Deezer Top Artist Chart to your existing Lidarr instance
addDeezerTopAlbumArtists="false" # true = enabled :: Enabling this will enable the extended script to automatically add artists that are on the Deezer Top Album Chart to your existing Lidarr instance
addDeezerTopTrackArtists="false" # true = enabled :: Enabling this will enable the extended script to automatically add artists that are on the Deezer Top Track Chart to your existing Lidarr instance
topLimit="3" # This setting controls the amount of Top Artist (Albums/Tracks/Artists) to add to Lidarr from Deezer
addRelatedArtists="false" # true = enabled :: WARNING !!! WARNING !!! Enabling this can cause an endless loop of additional artists.... Enabling this will enable the extended script to automatically add artists that are related to your existing Lidarr artists from Tidal & Deezer
numberOfRelatedArtistsToAddPerArtist="1" # 1-20 :: This will limit the number of related artists to add per artist in your library :: Minimum is 1, Maximum is 20
autoArtistAdderMonitored="true" # true or false :: If true, artists that are added automatically are monitored, when false they are added in an unmonitored state
preventRelatedArtistsLoop="true" # true = enabled :: This will prevent added related artists from adding additional related artists, thus killing an endless loop. This works by using the Lidarr's tag system (arr-extended), Remove tag from artist to allow it to be processed for related artists...

##### VIDEO SCRIPT
addFeaturedVideoArtists="false" # true = enabled :: WARNING !!! WARNING !!! Enabling this can cause an endless loop of additional artists.... Enabling this will enable the extended Video script to automatically add Music Video Featured Artists to your existing Lidarr artists from IMVDB
videoFormat="bv[width>=1920]+ba" # ONLY CHANGE if you know what you're doing, for guidance, please see yt-dlp documentation.
videoInfoJson="false" # true = enabled :: Stores yt-dlp info-json files alongside video files.
youtubeSubtitleLanguage="en" # Desired Language Code :: For guidance, please see yt-dlp documentation.
disableImvd="false" # true = enabled :: Use this to disable IMVDB as a source, will result in only tidal videos

##### DEEZER ARLCHECKER
arlUpdateInterval="24h" # Interval to check ARL Validity (default 24 hours). Reboot container after changing. s = seconds, m = minutes, h = hours, d = days
telegramBotEnable="false" # Enable/Disable Telegram Bot to notify if ARL expires. Otherwise check text file in custom-services.d/python for status.
telegramBotToken="" # Get token from BotFather during bot creation. If you use a notify channel for Lidarr, you can probably use the same bot, as this script only takes temporary control.
telegramUserChatID="" # Get your userid by chatting: t.me/userinfobot
pushoverEnable="false" # Pushover Notification Support
pushoverUserKey=""
pushoverAppAPIKey=""
ntfyEnable="false" # ntfy Notification Support
ntfyServerTopic="" # Put your server + topic address ex: https://ntfy.mydomain.com/lidarr
ntfyUserToken="" # create a new user token using the ntfy cli commands. See ntfy documentation on how to do that.

ansible/roles/lidarr/vars/main.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
deezer_arl: !vault |
$ANSIBLE_VAULT;1.1;AES256
30376434343866323037636662633839313731316533636463383031353462346337626337376563
3334356436383831353265663266666631356131386139310a333066363932643363386139396437
62333561623365336163663062663035383335336465333031306165633539343663333462313362
6634396266386466340a336162626636633531363738353561363030316133386163316135613538
64326563336665333566616163366263663763313764613636643931393862333166333332323139
65653234663836346538636264613637346662636566666364393165313863333939326538663632
38363438356234363639373162633762383139373838376137366333663166326165616663393039
34343863666163393164653937663931626434626562306333313665306332313865636462613030
36343930323861366235636535646662643561666663656165303031346631396239623531386365
39633063363934316637663139396461326533626362363730613562306163663163306665623233
38323763653064303631303566653736333237643031626266343036333938636635653132636661
64636438653933363335343431383162366636393834396639363266653963316431333138353638
61656535363734303139323964303336353032663635366131313866373033666662393130393132
3637393263373761646366376532613761613039633465633432
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,4 @@
- name: restart qbittorrent
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/qbittorrent"
    restarted: true
    state: restarted
@@ -4,7 +4,6 @@ module.exports = {
  delay: 30,

  torznab: [
    "http://prowlarr.local.{{ personal_domain }}/1/api?apikey={{ prowlarr_api_key }}", // TorrentLeech
    "http://prowlarr.local.{{ personal_domain }}/3/api?apikey={{ prowlarr_api_key }}", // ImmortalSeed
    "http://prowlarr.local.{{ personal_domain }}/6/api?apikey={{ prowlarr_api_key }}", // Aither
    "http://prowlarr.local.{{ personal_domain }}/7/api?apikey={{ prowlarr_api_key }}", // Reelflix
@@ -14,6 +13,8 @@ module.exports = {
    "http://prowlarr.local.{{ personal_domain }}/16/api?apikey={{ prowlarr_api_key }}", // AlphaRatio
    "http://prowlarr.local.{{ personal_domain }}/17/api?apikey={{ prowlarr_api_key }}", // FileList
    "http://prowlarr.local.{{ personal_domain }}/19/api?apikey={{ prowlarr_api_key }}", // Cathode-Ray.Tube
    "http://prowlarr.local.{{ personal_domain }}/25/api?apikey={{ prowlarr_api_key }}", // seedpool
    "http://prowlarr.local.{{ personal_domain }}/27/api?apikey={{ prowlarr_api_key }}", // Upload.cx
  ],

  outputDir: null,
@@ -1,5 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  starr:
    external: true
@@ -7,7 +5,7 @@ networks:
services:
  qbittorrent:
    container_name: qbittorrent
    image: lscr.io/linuxserver/qbittorrent:5.0.4
    image: lscr.io/linuxserver/qbittorrent:5.1.2
    restart: unless-stopped
    network_mode: "container:gluetun"
    environment:
@@ -21,6 +21,8 @@ settings:
  tracker_error_tag: issue # Will set the tag of any torrents that do not have a working tracker.
  nohardlinks_tag: noHL
  share_limits_tag: ~share_limit
  share_limits_min_seeding_time_tag: MinSeedTimeNotReached

directory:
  root_dir: /data/torrents
  torrents_dir: /torrents
@@ -75,6 +77,14 @@ tracker:
    tag: CRT
  t-ru.org:
    tag: rutracker
  seedpool|seedpool.org:
    tag: seedpool
  upload.cx:
    tag: ULCX
  archlinux:
    tag: archlinux
  other:
    tag: other

nohardlinks:
  - movies
@@ -92,6 +102,20 @@ share_limits:
    max_seeding_time: 28800 # 20 days
    cleanup: true
    add_group_to_tag: false
  isos:
    priority: 8
    include_any_tags:
      - archlinux
    max_seeding_time: 129600 # 90 days
    cleanup: true
    add_group_to_tag: false
  tl_cross-seed:
    priority: 9
    include_all_tags:
      - cross-seed
      - TorrentLeech
    limit_upload_speed: 100 # 100 KiBps
    add_group_to_tag: false
  public_trackers:
    priority: 10
    include_any_tags:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -17,7 +17,6 @@ custom_formats:
- ec8fa7296b64e8cd390a1600981f3923 # Repack/Proper
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 026d5aadd1a6b4e550b134cb6c72b3ca # Uncensored
- 273bd326df95955e1b6c26527d1df89b # v1
- 228b8ee9aa0a609463efca874524a6b8 # v2
@@ -19,7 +19,6 @@ custom_formats:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- b735f09d3c025cbb7d75a5d38325b73b # Remaster
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 3bc5f395426614e155e585a2f056cdf1 # Season Pack

# Streaming Services
@@ -35,3 +35,4 @@ quality_profiles:
qualities:
- WEBDL-480p
- WEBRip-480p
- name: SDTV
@@ -19,7 +19,6 @@ custom_formats:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- b735f09d3c025cbb7d75a5d38325b73b # Remaster
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 3bc5f395426614e155e585a2f056cdf1 # Season Pack

# Streaming Services
@@ -35,3 +35,4 @@ quality_profiles:
qualities:
- WEBDL-480p
- WEBRip-480p
- name: SDTV
@@ -19,7 +19,6 @@ custom_formats:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- b735f09d3c025cbb7d75a5d38325b73b # Remaster
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 3bc5f395426614e155e585a2f056cdf1 # Season Pack

# Streaming Services
@@ -19,7 +19,6 @@ custom_formats:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- b735f09d3c025cbb7d75a5d38325b73b # Remaster
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 3bc5f395426614e155e585a2f056cdf1 # Season Pack

# Streaming Services
@@ -19,7 +19,6 @@ custom_formats:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- b735f09d3c025cbb7d75a5d38325b73b # Remaster
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 3bc5f395426614e155e585a2f056cdf1 # Season Pack

# Streaming Services
@@ -19,7 +19,6 @@ custom_formats:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack v2
- 44e7c4de10ae50265753082e5dc76047 # Repack v3
- b735f09d3c025cbb7d75a5d38325b73b # Remaster
- d7c747094a7c65f4c2de083c24899e8b # FreeLeech
- 3bc5f395426614e155e585a2f056cdf1 # Season Pack

# Streaming Services
@@ -1,4 +1,4 @@
- name: restart recyclarr
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/{{ role_name }}"
    restarted: true
    state: restarted
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  starr:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  docker-socket-proxy:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,4 @@
- name: restart service
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/{{ role_name }}"
    restarted: true
    state: restarted
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

services:
  slskd:
@@ -9,7 +8,7 @@ services:
    user: "{{ service_user.uid }}:{{ media_gid }}"
    volumes:
      - "{{ data_dir }}/{{ role_name }}:/app"
      - "{{ media_storage_mnt }}/data/import/music/soulseek:/downloads"
      - "{{ media_storage_mnt }}/data/ddl/soulseek:/downloads"
      - "{{ media_storage_mnt }}/data/media/music:/music:ro"
    labels:
      traefik.enable: true
@@ -1,5 +1,5 @@
# debug: false
remote_configuration: true
remote_configuration: false

directories:
  downloads: /downloads
@@ -34,3 +34,8 @@ web:
  port: 5030
  url_base: /
  logging: false
  authentication:
    api_keys:
      my_api_key:
        key: "{{ slskd_api_key }}"
        #cidr: 10.0.0.0/24,172.16.0.0/12
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:

ansible/roles/soularr/handlers/main.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
- name: restart service
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/{{ role_name }}"
    state: restarted

ansible/roles/soularr/tasks/main.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
- name: Create service user
  user:
    name: "{{ role_name }}"
    system: true
  register: service_user
  become: true

- name: Create install directory
  file:
    path: "{{ install_directory }}/{{ role_name }}"
    state: directory
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_directory_mask }}"
  become: true

- name: Create config directory
  file:
    path: "{{ data_dir }}/{{ role_name }}"
    state: directory
    owner: "{{ service_user.uid }}"
    group: "{{ media_gid }}"
    mode: "{{ docker_compose_directory_mask }}"
  become: true

- name: Copy docker-compose file to destination
  template:
    src: docker-compose.yml
    dest: "{{ install_directory }}/{{ role_name }}/docker-compose.yml"
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_file_mask }}"
    validate: docker compose -f %s config
  become: true

- name: Install config.ini
  ansible.builtin.template:
    src: config.ini
    dest: "{{ data_dir }}/{{ role_name }}/config.ini"
  notify: restart service

- name: Start docker container
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/{{ role_name }}"
    pull: always
    remove_orphans: yes

ansible/roles/soularr/templates/config.ini (new file, 44 lines)
@@ -0,0 +1,44 @@
[Lidarr]
api_key = {{ lidarr_api_key }}
host_url = https://lidarr.local.{{ personal_domain }}
# This is the soulseek downloads directory as lidarr sees it
download_dir = /data/ddl/soulseek
disable_sync = False

[Slskd]
api_key = {{ slskd_api_key }}
host_url = https://soulseek.local.{{ personal_domain }}
url_base = /
download_dir = /downloads
delete_searches = False
stalled_timeout = 3600

[Release Settings]
use_most_common_tracknum = True
allow_multi_disc = True
accepted_countries = Europe,Japan,United Kingdom,United States,[Worldwide],Australia,Canada
skip_region_check = False
accepted_formats = CD,Digital Media,Vinyl

[Search Settings]
search_timeout = 5000
maximum_peer_queue = 50
minimum_peer_upload_speed = 0
minimum_filename_match_ratio = 0.8
allowed_filetypes = flac 24/192,flac 16/44.1,flac
ignored_users = User1,User2,Fred,Bob
search_for_tracks = True
album_prepend_artist = False
track_prepend_artist = True
search_type = incrementing_page
number_of_albums_to_grab = 10
remove_wanted_on_failure = False
title_blacklist = BlacklistWord1,blacklistword2
search_source = missing

[Logging]
level = INFO
# https://docs.python.org/3/library/logging.html#logrecord-attributes
format = [%(levelname)s|%(module)s|L%(lineno)d] %(asctime)s: %(message)s
# https://docs.python.org/3/library/time.html#time.strftime
datefmt = %Y-%m-%dT%H:%M:%S%z
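(Note: with the [Logging] format and datefmt above, an emitted line looks roughly like
    [INFO|soularr|L123] 2025-01-01T12:00:00-0500: Search started
where the module name, line number, timestamp, and message are illustrative.)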

ansible/roles/soularr/templates/docker-compose.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
services:
  soularr:
    container_name: "soularr"
    image: mrusse08/soularr:latest
    restart: unless-stopped
    user: "{{ service_user.uid }}:{{ media_gid }}"
    environment:
      - "TZ={{ timezone }}"
    volumes:
      - "{{ data_dir }}/{{ role_name }}:/data"
      - "{{ media_storage_mnt }}/data/ddl/soulseek:/downloads"
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:
@@ -1,4 +1,3 @@
version: "{{ docker_compose_version }}"

networks:
  traefik:

ansible/roles/wger/files/nginx.conf (new file, 28 lines)
@@ -0,0 +1,28 @@
upstream wger {
    server web:8000;
}

server {

    listen 80;

    location / {
        proxy_pass http://wger;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
        proxy_set_header X-Forwarded-Host $host:$server_port;
        proxy_redirect off;
    }

    location /static/ {
        alias /wger/static/;
    }

    location /media/ {
        alias /wger/media/;
    }

    # Increase max body size to allow for video uploads
    client_max_body_size 100M;
}
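(Note: the rendered config can be sanity-checked inside the running nginx container with, for example:
    docker compose exec nginx nginx -t)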

ansible/roles/wger/files/redis.conf (new file, 2359 lines)
File diff suppressed because it is too large.

ansible/roles/wger/tasks/main.yml (new file, 60 lines)
@@ -0,0 +1,60 @@
- name: Create install directory
  file:
    path: "{{ install_directory }}/{{ role_name }}"
    state: directory
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_directory_mask }}"
  become: true

- name: Create database data directory
  file:
    path: "{{ data_dir }}/postgres/{{ role_name }}"
    state: directory
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_directory_mask }}"
  become: true

- name: Create redis data directory
  file:
    path: "{{ data_dir }}/redis/{{ role_name }}"
    state: directory
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_directory_mask }}"
  become: true

- name: Create data and config directories
  file:
    path: "{{ data_dir }}/{{ role_name }}/{{ item }}"
    state: directory
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_directory_mask }}"
  loop:
    - config
    - static
    - media
    - celery-beat

- name: Install wger config file (templatized)
  template:
    src: prod.env
    dest: "{{ data_dir }}/{{ role_name }}/config/prod.env"

- name: Install config files
  copy:
    src: ./
    dest: "{{ data_dir }}/{{ role_name }}/config"

- name: Copy docker-compose file to destination
  template:
    src: docker-compose.yml
    dest: "{{ install_directory }}/{{ role_name }}/docker-compose.yml"
    owner: "{{ docker_user }}"
    mode: "{{ docker_compose_file_mask }}"
    validate: docker compose -f %s config
  become: true

- name: Start docker container
  community.docker.docker_compose_v2:
    project_src: "{{ install_directory }}/{{ role_name }}"
    pull: always
    remove_orphans: yes

ansible/roles/wger/templates/docker-compose.yml (new file, 132 lines)
@@ -0,0 +1,132 @@
networks:
  traefik:
    external: true

services:
  web:
    image: wger/server:latest
    depends_on:
      db:
        condition: service_healthy
      cache:
        condition: service_healthy
    networks:
      - default
    env_file:
      - {{ data_dir }}/{{ role_name }}/config/prod.env
    volumes:
      - {{ data_dir }}/{{ role_name }}/static:/home/wger/static
      - {{ data_dir }}/{{ role_name }}/media:/home/wger/media
    expose:
      - 8000
    healthcheck:
      test: wget --no-verbose --tries=1 --spider http://localhost:8000
      interval: 10s
      timeout: 5s
      start_period: 300s
      retries: 5
    restart: unless-stopped

  nginx:
    image: nginx:stable
    depends_on:
      - web
    networks:
      - traefik
      - default
    volumes:
      - {{ data_dir }}/{{ role_name }}/config/nginx.conf:/etc/nginx/conf.d/default.conf
      - {{ data_dir }}/{{ role_name }}/static:/wger/static:ro
      - {{ data_dir }}/{{ role_name }}/media:/wger/media:ro
    healthcheck:
      test: service nginx status
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s
    restart: unless-stopped
    labels:
      traefik.enable: true
      traefik.http.routers.wger.rule: "Host(`wger.{{ personal_domain }}`)"
      #traefik.http.services.wger.loadbalancer.server.port: 8000
      #traefik.http.routers.wger.middlewares: lan-whitelist@file

  db:
    image: postgres:15-alpine
    environment:
      - POSTGRES_USER=wger
      - POSTGRES_PASSWORD=wger
      - POSTGRES_DB=wger
      - "TZ={{ timezone }}"
    networks:
      - default
    volumes:
      - {{ data_dir }}/postgres/{{ role_name }}:/var/lib/postgresql/data/
    expose:
      - 5432
    healthcheck:
      test: pg_isready -U wger
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s
    restart: unless-stopped

  cache:
    image: redis
    expose:
      - 6379
    networks:
      - default
    volumes:
      - {{ data_dir }}/{{ role_name }}/config/redis.conf:/usr/local/etc/redis/redis.conf
      - {{ data_dir }}/redis/{{ role_name }}/data:/data
    command: [ "redis-server", "/usr/local/etc/redis/redis.conf"]
    healthcheck:
      test: redis-cli ping
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s
    restart: unless-stopped

    # You probably want to limit the memory usage of the cache, otherwise it might
    # hog all the available memory. Remove or change according to your needs.
    #mem_limit: 2gb

  celery_worker:
    image: wger/server:latest
    command: /start-worker
    networks:
      - default
    env_file:
      - {{ data_dir }}/{{ role_name }}/config/prod.env
    volumes:
      - {{ data_dir }}/{{ role_name }}/media:/home/wger/media
    depends_on:
      web:
        condition: service_healthy
    healthcheck:
      test: celery -A wger inspect ping
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s

  celery_beat:
    image: wger/server:latest
    command: /start-beat
    networks:
      - default
    volumes:
      - {{ data_dir }}/{{ role_name }}/celery-beat:/home/wger/beat/
    env_file:
      - {{ data_dir }}/{{ role_name }}/config/prod.env
    depends_on:
      celery_worker:
        condition: service_healthy

# Heads up, if you remove these volumes and use folders directly you need to chown them
# to the UID and GID 1000 even if it doesn't exist on your system. Also, they should
# be readable by everyone.

ansible/roles/wger/templates/prod.env (new file, 172 lines)
@@ -0,0 +1,172 @@
SECRET_KEY="{{ DJANGO_SECRET }}"
SIGNING_KEY="{{ JWT_SECRET }}"

TIME_ZONE=America/New_York
TZ=America/New_York

CSRF_TRUSTED_ORIGINS="https://wger.{{ personal_domain }}"
X_FORWARDED_PROTO_HEADER_SET=True

MEDIA_URL="https://wger.{{ personal_domain }}/media/"
STATIC_URL="https://wger.{{ personal_domain }}/static/"

#
# These settings usually don't need changing
#

#
# Application
WGER_INSTANCE=https://wger.de # Wger instance from which to sync exercises, images, etc.
ALLOW_REGISTRATION=True
ALLOW_GUEST_USERS=True
ALLOW_UPLOAD_VIDEOS=True
# Users won't be able to contribute to exercises if their account age is
# lower than this amount in days.
MIN_ACCOUNT_AGE_TO_TRUST=21
# Synchronizing exercises
# It is recommended to keep the local database synchronized with the wger
# instance specified in WGER_INSTANCE since new exercises are added and translations
# improved. For this you have different possibilities:
# - Sync exercises on startup:
# SYNC_EXERCISES_ON_STARTUP=True
# DOWNLOAD_EXERCISE_IMAGES_ON_STARTUP=True
# - Sync them in the background with celery. This will setup a job that will run
# once a week at a random time (this time is selected once when starting the server)
SYNC_EXERCISES_CELERY=True
SYNC_EXERCISE_IMAGES_CELERY=True
SYNC_EXERCISE_VIDEOS_CELERY=True
# - Manually trigger the process as needed:
# docker compose exec web python3 manage.py sync-exercises
# docker compose exec web python3 manage.py download-exercise-images
# docker compose exec web python3 manage.py download-exercise-videos

# Synchronizing ingredients
# You can also synchronize the ingredients from a remote wger instance, and have
# basically the same options as for the ingredients:
# - Sync them in the background with celery. This will setup a job that will run
# once a week at a random time (this time is selected once when starting the server)
SYNC_INGREDIENTS_CELERY=True
# - Manually trigger the process as needed:
# docker compose exec web python3 manage.py sync-ingredients

# This option controls whether to download ingredients and their images from the
# configured wger instance. When scanning products with the barcode scanner, it is
# possible to dynamically fetch the ingredient if it is not known in the local database.
# Possible values: WGER or None. Requires USE_CELERY to be set to true.
DOWNLOAD_INGREDIENTS_FROM=WGER

# Whether celery is configured and should be used. Can be left to true with
# this setup but can be deactivated if you are using the app in some other way
USE_CELERY=True

#
# Celery
CELERY_BROKER=redis://cache:6379/2
CELERY_BACKEND=redis://cache:6379/2
CELERY_FLOWER_PASSWORD=adminadmin

#
# Database
DJANGO_DB_ENGINE=django.db.backends.postgresql
DJANGO_DB_DATABASE=wger
DJANGO_DB_USER=wger
DJANGO_DB_PASSWORD=wger
DJANGO_DB_HOST=db
DJANGO_DB_PORT=5432
DJANGO_PERFORM_MIGRATIONS=True # Perform any new database migrations on startup

#
# Cache
DJANGO_CACHE_BACKEND=django_redis.cache.RedisCache
DJANGO_CACHE_LOCATION=redis://cache:6379/1
DJANGO_CACHE_TIMEOUT=1296000 # in seconds - 60*60*24*15, 15 Days
DJANGO_CACHE_CLIENT_CLASS=django_redis.client.DefaultClient
# DJANGO_CACHE_CLIENT_PASSWORD=abcde... # Only if you changed the redis config
# DJANGO_CACHE_CLIENT_SSL_KEYFILE=/path/to/ssl_keyfile # Path to an ssl private key.
# DJANGO_CACHE_CLIENT_SSL_CERTFILE=/path/to/ssl_certfile # Path to an ssl certificate.
# DJANGO_CACHE_CLIENT_SSL_CERT_REQS=<none | optional | required> # The string value for the verify_mode.
# DJANGO_CACHE_CLIENT_SSL_CHECK_HOSTNAME=False # If set, match the hostname during the SSL handshake.

#
# Brute force login attacks
# https://django-axes.readthedocs.io/en/latest/index.html
AXES_ENABLED=True
AXES_FAILURE_LIMIT=10
AXES_COOLOFF_TIME=30 # in minutes
AXES_HANDLER=axes.handlers.cache.AxesCacheHandler
AXES_LOCKOUT_PARAMETERS=ip_address
AXES_IPWARE_PROXY_COUNT=1
AXES_IPWARE_META_PRECEDENCE_ORDER=HTTP_X_FORWARDED_FOR,REMOTE_ADDR
#
# Others
DJANGO_DEBUG=False
WGER_USE_GUNICORN=True
EXERCISE_CACHE_TTL=18000 # in seconds - 5*60*60, 5 hours
SITE_URL=http://localhost

#
# JWT auth
ACCESS_TOKEN_LIFETIME=10 # The lifetime duration of the access token, in minutes
REFRESH_TOKEN_LIFETIME=24 # The lifetime duration of the refresh token, in hours

#
# Auth Proxy Authentication
#
# Please read the documentation before enabling this feature:
# https://wger.readthedocs.io/en/latest/administration/auth_proxy.html
AUTH_PROXY_HEADER=''
AUTH_PROXY_TRUSTED_IPS=''
AUTH_PROXY_CREATE_UNKNOWN_USER=False
AUTH_PROXY_USER_EMAIL_HEADER=''
AUTH_PROXY_USER_NAME_HEADER=''

#
# Other possible settings

# Recaptcha keys. You will need to create an account and register your domain
# https://www.google.com/recaptcha/
# RECAPTCHA_PUBLIC_KEY=abcde...
# RECAPTCHA_PRIVATE_KEY=abcde...
USE_RECAPTCHA=False

# Clears the static files before copying the new ones (i.e. just calls collectstatic
# with the appropriate flag: "manage.py collectstatic --no-input --clear"). Usually
# this can be left like this but if you have problems and new static files are not
# being copied correctly, clearing everything might help
DJANGO_CLEAR_STATIC_FIRST=False

#
# Email
# https://docs.djangoproject.com/en/4.1/topics/email/#smtp-backend
# ENABLE_EMAIL=False
# EMAIL_HOST=email.example.com
# EMAIL_PORT=587
# EMAIL_HOST_USER=username
# EMAIL_HOST_PASSWORD=password
# EMAIL_USE_TLS=True
# EMAIL_USE_SSL=False
FROM_EMAIL='wger Workout Manager <wger@example.com>'

# Set your name and email to be notified if an internal server error occurs.
# Needs a working email configuration
# DJANGO_ADMINS=your name,email@example.com

# Whether to compress css and js files into one (of each)
# COMPRESS_ENABLED=True

#
# Django Rest Framework
# The number of proxies in front of the application. In the default configuration
# only nginx is. Change as appropriate if your setup differs. Also note that this
# is only used when throttling API requests.
NUMBER_OF_PROXIES=1

#
# Gunicorn
#
# Additional gunicorn options, change as needed.
# For the number of workers to spawn, a usually recommended value is (2 x $num_cores) + 1
# see:
# - https://docs.gunicorn.org/en/stable/settings.html
# - https://github.com/wger-project/wger/blob/master/extras/docker/production/entrypoint.sh#L95
GUNICORN_CMD_ARGS="--workers 3 --threads 2 --worker-class gthread --proxy-protocol True --timeout 240"

ansible/roles/wger/vars/main.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
DJANGO_SECRET: !vault |
$ANSIBLE_VAULT;1.1;AES256
64326466343139613339363438386534363564626662366266353732383831613735613130666663
6464623832646233653332313434303939666633613261640a393132616662326637356263373966
30623465363333306430636462653738353737376635393366623162383437343430336163373832
3931363133376466330a373565353636353932653436306165303664366539333263626566393436
35386366633735373137616238303462616162636362306563343064383764383136

JWT_SECRET: !vault |
$ANSIBLE_VAULT;1.1;AES256
36306265373261313533313237653432663230666162373062373166323061373932366434616532
6538393830396535633434373530626566316538313732620a636439363632666430613938326164
36363432363361653665303965353566623861323331306630316265633430616266363462636362
6132636138306335620a393662663431623566663165383362663138356237343063363239353063
61336633373963356533396132316432356534373930613434326235346639326634
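(Note: DJANGO_SECRET and JWT_SECRET are arbitrary random strings; an illustrative way to generate one before vaulting it:
    python3 -c "import secrets; print(secrets.token_urlsafe(50))")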