# Patterns
The task file from the specification is very basic. Commands can also have parameters and call each other.
The following is a collection of more complex task file patterns.
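A minimal sketch of such a task file, assuming the common layout of one function per command and a "$@" dispatch line at the bottom (names are illustrative):
#!/bin/bash
# Command with a parameter.
hello() {
    echo "Hello $1"
}
# Command that calls another command.
greet() {
    hello "world"
}
# Run the function named by the first argument.
"$@"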
# Set default parameter
Fall back to a default value for a parameter.
build() {
    local platform="amd64"
    if [[ -n "$1" ]]; then
        platform="$1"
    fi
}
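A hypothetical invocation, following the task naming used elsewhere on this page:
task build        # platform defaults to amd64
task build arm64  # platform is set to arm64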
# Ensure parameter is not empty
Check the first param and exit if it is empty.
deploy() {
    if [[ -z "$1" ]]; then echo "\$1 is empty."; exit; fi
}
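With the guard in place, a call without the parameter stops early; the argument below is illustrative:
task deploy             # prints "$1 is empty." and exits
task deploy production  # continues with "production" as $1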
# Prompt for input
Use read to ask for input.
if [[ -z "$2" ]]; then
    read -p 'Enter the task description: ' task_description
else
    task_description="$2"
fi
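The snippet assumes it sits inside a command where the description is the second parameter. A hypothetical task wrapping it (the file name is illustrative):
add-todo() {
    local task_description
    if [[ -z "$2" ]]; then
        read -p 'Enter the task description: ' task_description
    else
        task_description="$2"
    fi
    echo "$1: $task_description" >> todo.txt
}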
# Static env vars
These variables are exported from the script and used by external tools.
CONFIGURATION_FILE=${CONFIGURATION_FILE:=file.conf}
GIT_BRANCH=$(git symbolic-ref --short -q HEAD)
# Updateable vars
These variables can be configured from the environment, but are used only internally.
container_engine=${CONTAINER_ENGINE:=docker}
compose_command=${COMPOSE_COMMAND:=docker compose}
# Conditional vars
Depending on the value of another variable, this variable's value changes.
if [[ "$container_engine" == "podman" ]]; then
compose_command="podman-compose"
fi
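A hedged example of how these variables might then be used inside a command (the up task is illustrative):
up() {
    echo "Starting containers with $container_engine"
    # Word splitting is intended so that "docker compose" expands to two words.
    $compose_command up -d
}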
# Template with env vars
Create a parameterized file from a template. Requires envsubst.
template-with-env() {
    echo "Template $CONFIGURATION_FILE"
    export CONFIGURATION_1
    export CONFIGURATION_2=${CONFIGURATION_2:=value}
    envsubst < 'file.conf.template' > "$CONFIGURATION_FILE"
}
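A matching file.conf.template could reference the exported variables with envsubst placeholders; the contents below are illustrative:
# file.conf.template (illustrative)
option_one=${CONFIGURATION_1}
option_two=${CONFIGURATION_2}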
# Create Python virtual env
Initialize Python virtual env with uv.
init-venv() {
    if [[ ! -d ".venv$GIT_BRANCH" ]]; then
        echo "Init .venv$GIT_BRANCH with $(uv --version)."
        uv venv ".venv$GIT_BRANCH"
    fi
}
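The generate-password-hash command below calls an activate-venv helper that is not shown here; a minimal sketch, assuming the venv created by init-venv:
activate-venv() {
    init-venv
    source ".venv$GIT_BRANCH/bin/activate"
}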
# Call a Python script
Run a Python script.
generate-password-hash() {
    activate-venv
    if [[ -z "$1" ]]; then echo "\$1 is empty."; exit; fi
    export PASSWORD_PLAIN="$1"
    bin/password-hash
}
The Python script: bin/password-hash
#!/usr/bin/env python3
import os
from passlib.context import CryptContext
crypt_context = CryptContext(schemes=['pbkdf2_sha512', 'plaintext'], deprecated=['plaintext'])
password = os.environ.get('PASSWORD_PLAIN')
print(crypt_context.hash(password))
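Assuming passlib is installed into the virtual environment (for example with uv pip install passlib) and bin/password-hash is executable, the command can be called like this:
task generate-password-hash 'my-plain-password'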
# Command with named parameters
Assuming you have a docker-compose.yml and would like to start selected or all containers.
start() {
    if [[ "$1" =~ 'db' ]]; then
        docker compose up -d db
    fi
    if [[ "$1" =~ 'admin' ]]; then
        docker compose up -d admin
        echo 'Open http://localhost:8000 in your browser.'
    fi
    if [[ "$1" =~ 'odoo' ]]; then
        docker compose up -d odoo
        echo 'Open http://localhost:8069 in your browser.'
    fi
    if [[ "$1" =~ 'mail' ]]; then
        docker compose up -d mail
        echo 'Open http://localhost:8025 in your browser.'
    fi
}
Start selected containers with task start db,admin, or all of them by passing every name, for example task start db,admin,odoo,mail.
# Run commands in container
Use docker exec -i to run commands in a container.
drop-db() {
    local database="$1"
    if [[ -z "$database" ]]; then
        database="example"
    fi
    docker exec -i db psql "postgres://odoo:odoo@localhost:5432/postgres" -c "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '$database';"
    docker exec -i db psql "postgres://odoo:odoo@localhost:5432/postgres" -c "DROP DATABASE \"$database\";"
}
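Example invocations, with the database name below being illustrative:
task drop-db          # drops the default "example" database
task drop-db sandbox  # drops the "sandbox" database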
# Loop over files and folders
Use for to loop over files, folders, or arrays.
render() {
    echo "Update index.html for all folders"
    for folder in ./*; do
        if [[ -f "$folder/README.md" ]]; then
            cd "$folder" || exit
            md2html README.md _site/index.html
            cd .. || exit
        fi
    done
}
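For the array case mentioned above, a hedged variant that loops over a Bash array instead of the folder glob (the folder names are illustrative):
render-selected() {
    local folders=("docs" "blog" "notes")
    for folder in "${folders[@]}"; do
        if [[ -f "$folder/README.md" ]]; then
            cd "$folder" || exit
            md2html README.md _site/index.html
            cd .. || exit
        fi
    done
}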
# Parse and convert a file
With this function you can split a file into multiple parts whenever a specific keyword matches. In this example the keyword is !vault.
convert-vault-file() {
    local file_path="$1"
    local temp_file=$(mktemp)
    local temp_part_file=$(mktemp)
    local write_finished=false
    local current_key=""
    while IFS= read -r line; do
        # Check for keyword
        if [[ "$line" =~ "!vault" ]]; then
            # Process previous vault entry if it exists
            if [[ "$write_finished" = true ]] && [[ -n "$current_key" ]] && [[ -s "$temp_part_file" ]]; then
                # Decrypt part file and write to assembled file
                ansible-vault decrypt "$temp_part_file"
                local value=$(cat "$temp_part_file")
                echo "$current_key: $value" >> "$temp_file"
            fi
            # Set up for new vault entry
            current_key=$(echo "$line" | cut -d':' -f1)
            # Clear the part file
            : > "$temp_part_file"
            # Flag as ready to write
            write_finished=true
        else
            if [[ "$write_finished" = true ]]; then
                # Pipe into part file
                echo "$line" >> "$temp_part_file"
            fi
        fi
    done < "$file_path"
    # Process the final vault entry
    if [[ "$write_finished" = true ]] && [[ -n "$current_key" ]] && [[ -s "$temp_part_file" ]]; then
        ansible-vault decrypt "$temp_part_file"
        local value=$(cat "$temp_part_file")
        echo "$current_key: $value" >> "$temp_file"
    fi
    # Output assembled file
    cat "$temp_file"
    # Cleanup temp files
    rm -f "$temp_file"
    rm -f "$temp_part_file"
}
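A hypothetical invocation, assuming an Ansible variables file with inline !vault entries; the decrypted key/value pairs are printed to stdout:
task convert-vault-file group_vars/all.yml > group_vars/all.plain.yml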
# Process data
This pattern tries to match the workflow of a Jupyter notebook. Create a folder with the data processing scripts:
scripts
├── 01_import-mail-data
├── 02_transform-mail-data
└── 03_export-mail-data
The following function lists the scripts and asks for the number of scripts to run. Every script up to the entered number is concatenated and executed.
process-data() {
    while true; do
        local scripts=($(ls scripts))
        echo -e "Available scripts:\n"
        for index in "${!scripts[@]}"; do
            echo "$((index+1)): ${scripts[$index]}"
        done
        echo ""
        read -p "Enter the number of scripts to run (or 'q' to quit): " user_input
        if [[ "$user_input" = "q" ]]; then
            break
        fi
        local temp_file=$(mktemp)
        for index in "${!scripts[@]}"; do
            if (( $user_input >= $((index+1)) )); then
                cat "scripts/${scripts[$index]}" >> "$temp_file"
                echo >> "$temp_file"
            fi
        done
        clear
        echo -e "\nPython Output:\n\n---"
        python "$temp_file"
        local exit_code=$?
        echo -e "---\n"
        echo -e "Exit code: $exit_code\n"
        rm "$temp_file"
    done
}
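For illustration only, the numbered scripts are plain Python fragments that build on each other like notebook cells; the contents below are hypothetical:
# scripts/01_import-mail-data (hypothetical)
import csv
rows = list(csv.DictReader(open('mail.csv')))
# scripts/02_transform-mail-data (hypothetical)
rows = [row for row in rows if row.get('subject')]
# scripts/03_export-mail-data (hypothetical)
import json
print(json.dumps(rows, indent=2))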