| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421 |
#!/usr/bin/env bash
# Orchestrates the combined SOC lab stacks (Wazuh, IRIS, Shuffle, PagerDuty
# stub, SOC integrator, Flask OpenAPI demo) via docker compose.
set -euo pipefail

# Absolute directory containing this script; all compose paths hang off it.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Shared configuration, each overridable from the caller's environment.
export SOC_SHARED_NETWORK="${SOC_SHARED_NETWORK:-soc_shared}"
export IRIS_HTTPS_PORT="${IRIS_HTTPS_PORT:-8443}"
export INTERFACE_HTTPS_PORT="${IRIS_HTTPS_PORT}"  # alias consumed by the IRIS compose file
export SHUFFLE_OPENSEARCH_PORT="${SHUFFLE_OPENSEARCH_PORT:-9201}"
export PAGERDUTY_STUB_PORT="${PAGERDUTY_STUB_PORT:-18080}"
export SOC_INTEGRATOR_PORT="${SOC_INTEGRATOR_PORT:-8088}"

# With no arguments the script behaves like "up -d"; otherwise the first word
# is the command and the rest (target + options) is kept in ARGS.
if (( $# == 0 )); then
  COMMAND="up"
  ARGS=(-d)
else
  COMMAND="$1"
  shift
  ARGS=("$@")
fi
# Print usage and exit successfully when any help alias was requested.
case "${COMMAND}" in
  help|--help|-h)
    cat <<'EOF'
Usage:
./run-combined-stack.sh [command] [target] [options]
Commands:
up Start services (default: up -d when no args)
recreate Force-recreate containers (picks up bind-mount inode changes)
dedup Remove duplicate OpenSearch index patterns in Wazuh dashboard
down Stop services (requires explicit target)
logs View logs
status Show container and endpoint status
cleanup Remove unused Docker resources (containers/images/cache)
help Show this help message
Targets:
all|--all All stacks (wazuh, iris, shuffle, pagerduty, integrator, flask-openapi-shuffle)
wazuh Wazuh single-node stack
iris IRIS-web stack
shuffle Shuffle stack
pagerduty PagerDuty stub
integrator SOC integrator stack
flask-openapi-shuffle Flask OpenAPI demo stack
Examples:
./run-combined-stack.sh
./run-combined-stack.sh up --all -d
./run-combined-stack.sh up iris -d
./run-combined-stack.sh up flask-openapi-shuffle -d
./run-combined-stack.sh recreate wazuh
./run-combined-stack.sh recreate --all
./run-combined-stack.sh dedup
./run-combined-stack.sh down shuffle
./run-combined-stack.sh down --all
./run-combined-stack.sh logs integrator -f
./run-combined-stack.sh logs --all --tail 200
./run-combined-stack.sh cleanup
./run-combined-stack.sh cleanup --with-volumes
./run-combined-stack.sh status
EOF
    exit 0
    ;;
esac
# The compose overrides attach services to an external shared network, so it
# must exist before any command that drives docker compose.
# FIX: "recreate" is included — it is rewritten to `docker compose up
# --force-recreate` below and previously failed when the network was absent.
if [[ "${COMMAND}" == "up" || "${COMMAND}" == "down" || "${COMMAND}" == "logs" || "${COMMAND}" == "recreate" ]]; then
  if ! docker network inspect "${SOC_SHARED_NETWORK}" >/dev/null 2>&1; then
    docker network create "${SOC_SHARED_NETWORK}" >/dev/null
  fi
fi

# "status" is delegated wholesale to the companion script (replaces this
# process via exec, so nothing below runs).
if [[ "${COMMAND}" == "status" ]]; then
  exec "${ROOT_DIR}/soc-status.sh"
fi
#######################################
# Remove duplicate OpenSearch "index-pattern" saved objects from the Wazuh
# dashboard, keeping one canonical copy per title (the object whose id equals
# its title, else the one with the oldest updated_at) and deleting the rest.
# Globals:   none (dashboard URL and kibanaserver credentials are hard-coded)
# Outputs:   progress messages to stdout
# Returns:   1 if the dashboard is unreachable within 60s; otherwise the
#            embedded python's exit status
#######################################
dedup_index_patterns() {
  local dashboard_url="https://localhost:443"
  local user="kibanaserver"
  local pass="kibanaserver"
  local max_wait=60
  local waited=0
  echo "Waiting for Wazuh dashboard to be ready..."
  # NOTE(review): curl without --fail returns 0 for any HTTP response (even
  # 401/500), so this loop only proves the port accepts TLS connections —
  # confirm that is a sufficient readiness signal for /api/status.
  until curl -sk -u "${user}:${pass}" "${dashboard_url}/api/status" -o /dev/null 2>&1; do
    sleep 3
    waited=$((waited + 3))
    if [[ ${waited} -ge ${max_wait} ]]; then
      echo "Dashboard not ready after ${max_wait}s — skipping dedup."
      return 1
    fi
  done
  echo "Scanning for duplicate index patterns..."
  # Heredoc delimiter is unquoted on purpose: ${dashboard_url}/${user}/${pass}
  # are expanded by the shell before python sees the script.
  python3 - <<PYEOF
import json, sys, urllib.request, urllib.error, ssl
BASE = "${dashboard_url}"
AUTH = ("${user}", "${pass}")
# Dashboard serves a self-signed certificate: disable TLS verification.
ctx = ssl.create_default_context(); ctx.check_hostname = False; ctx.verify_mode = ssl.CERT_NONE
def req(method, path, data=None):
    # Minimal authenticated JSON request helper; osd-xsrf header is required
    # by OpenSearch Dashboards for mutating saved-object calls.
    import base64
    token = base64.b64encode(f"{AUTH[0]}:{AUTH[1]}".encode()).decode()
    headers = {"osd-xsrf": "true", "Authorization": f"Basic {token}"}
    if data:
        headers["Content-Type"] = "application/json"
    r = urllib.request.Request(BASE + path, data=data, headers=headers, method=method)
    with urllib.request.urlopen(r, context=ctx, timeout=15) as resp:
        return json.loads(resp.read())
# Fetch all index-pattern saved objects
# NOTE(review): per_page=100 caps the scan; patterns beyond 100 are ignored.
result = req("GET", "/api/saved_objects/_find?type=index-pattern&per_page=100")
patterns = result.get("saved_objects", [])
# Group by title
from collections import defaultdict
by_title = defaultdict(list)
for p in patterns:
    by_title[p["attributes"]["title"]].append(p)
deleted = 0
for title, objs in by_title.items():
    if len(objs) <= 1:
        continue
    # Keep the one whose ID matches the title (canonical), or the oldest updated_at
    canonical = next((o for o in objs if o["id"] == title), None)
    if not canonical:
        canonical = sorted(objs, key=lambda o: o.get("updated_at", ""))[0]
    to_delete = [o for o in objs if o["id"] != canonical["id"]]
    for obj in to_delete:
        try:
            req("DELETE", f"/api/saved_objects/index-pattern/{obj['id']}")
            print(f" deleted [{obj['id']}] title='{title}'")
            deleted += 1
        except urllib.error.HTTPError as e:
            # Deletion failures are reported but do not abort the sweep.
            print(f" error deleting [{obj['id']}]: {e}")
if deleted == 0:
    print(" no duplicates found.")
else:
    print(f" removed {deleted} duplicate(s).")
PYEOF
}
#######################################
# Reclaim Docker disk space: prune stopped containers, unused images and the
# builder cache; optionally unused volumes as well.
# Arguments: $1 - "true" to also prune volumes (default "false")
# Outputs:   progress plus `docker system df` summary to stdout
#######################################
run_cleanup() {
  local prune_volumes="${1:-false}"
  echo "Pruning stopped containers..."
  docker container prune -f
  echo "Pruning unused images..."
  docker image prune -a -f
  echo "Pruning builder cache..."
  docker builder prune -f
  case "${prune_volumes}" in
    true)
      echo "Pruning unused volumes..."
      docker volume prune -f
      ;;
  esac
  echo "Current Docker disk usage:"
  docker system df
}
#######################################
# Shared driver: invoke `docker compose` for one stack using the global
# COMMAND and ARGS. Replaces six copies of the same invocation.
# Globals:   COMMAND (read), ARGS (read)
# Arguments: $1 - compose project name
#            $2 - compose project directory
#            $3… - compose file paths (passed as repeated -f flags)
#######################################
_compose() {
  local project="$1" project_dir="$2"
  shift 2
  local file_flags=()
  local f
  for f in "$@"; do
    file_flags+=(-f "$f")
  done
  docker compose \
    --project-name "$project" \
    --project-directory "$project_dir" \
    "${file_flags[@]}" \
    "${COMMAND}" ${ARGS[@]+"${ARGS[@]}"}
}

# One thin wrapper per stack; each keeps its original name so every caller
# elsewhere in the script is unchanged.
run_wazuh() {
  _compose wazuh-single "${ROOT_DIR}/wazuh-docker/single-node" \
    "${ROOT_DIR}/wazuh-docker/single-node/docker-compose.yml" \
    "${ROOT_DIR}/compose-overrides/wazuh.shared-network.yml"
}
run_iris() {
  _compose iris-web "${ROOT_DIR}/iris-web" \
    "${ROOT_DIR}/iris-web/docker-compose.dev.yml" \
    "${ROOT_DIR}/compose-overrides/iris.shared-network.yml"
}
run_shuffle() {
  _compose shuffle "${ROOT_DIR}/Shuffle" \
    "${ROOT_DIR}/Shuffle/docker-compose.yml" \
    "${ROOT_DIR}/compose-overrides/shuffle.shared-network.yml"
}
run_pagerduty_stub() {
  _compose pagerduty-stub "${ROOT_DIR}" \
    "${ROOT_DIR}/compose-overrides/pagerduty.stub.yml"
}
run_soc_integrator() {
  _compose soc-integrator "${ROOT_DIR}/compose-overrides" \
    "${ROOT_DIR}/compose-overrides/soc-integrator.yml"
}
run_flask_openapi_shuffle() {
  _compose flask-openapi-shuffle "${ROOT_DIR}/flask-openapi-shuffle" \
    "${ROOT_DIR}/flask-openapi-shuffle/docker-compose.yml"
}
#######################################
# Dispatch the current COMMAND/ARGS to a single named stack.
# Arguments: $1 - target name
# Returns:   exit 1 (terminates the script) on an unknown target
#######################################
run_target() {
  local target="$1"
  case "${target}" in
    wazuh)                 run_wazuh ;;
    iris)                  run_iris ;;
    shuffle)               run_shuffle ;;
    pagerduty)             run_pagerduty_stub ;;
    integrator)            run_soc_integrator ;;
    flask-openapi-shuffle) run_flask_openapi_shuffle ;;
    *)
      echo "Unknown target: ${target}"
      echo "Use one of: wazuh, iris, shuffle, pagerduty, integrator, flask-openapi-shuffle"
      exit 1
      ;;
  esac
}
#######################################
# Run the current COMMAND against every stack. Shutdown ("down") walks the
# stacks in the reverse of the start-up order.
# Arguments: $1 - "down" for teardown order; anything else uses start order
#######################################
run_all() {
  local mode="$1"
  local -a sequence
  if [[ "${mode}" == "down" ]]; then
    sequence=(run_flask_openapi_shuffle run_soc_integrator run_pagerduty_stub run_shuffle run_iris run_wazuh)
  else
    sequence=(run_wazuh run_iris run_shuffle run_pagerduty_stub run_soc_integrator run_flask_openapi_shuffle)
  fi
  local step
  for step in "${sequence[@]}"; do
    "$step"
  done
}
#######################################
# Stream logs from every stack concurrently (one background compose process
# per stack); Ctrl-C / TERM kills the whole process group.
# Globals: COMMAND, ARGS (overwritten), LOG_TAIL (optional tail size)
#######################################
follow_all_logs() {
  COMMAND="logs"
  ARGS=("-f" "--tail" "${LOG_TAIL:-100}")
  local runner
  for runner in run_wazuh run_iris run_shuffle run_pagerduty_stub run_soc_integrator run_flask_openapi_shuffle; do
    "$runner" &
  done
  trap 'kill 0' INT TERM
  wait
}
#######################################
# Run the logs COMMAND for one stack, or sequentially for all of them.
# Arguments: $1 - target name (defaults to "all")
# Returns:   exit 1 (terminates the script) on an unknown target
#######################################
run_logs_for_target() {
  local target="${1:-all}"
  case "${target}" in
    wazuh)                 run_wazuh ;;
    iris)                  run_iris ;;
    shuffle)               run_shuffle ;;
    pagerduty)             run_pagerduty_stub ;;
    integrator)            run_soc_integrator ;;
    flask-openapi-shuffle) run_flask_openapi_shuffle ;;
    all|--all)
      local runner
      for runner in run_wazuh run_iris run_shuffle run_pagerduty_stub run_soc_integrator run_flask_openapi_shuffle; do
        "$runner"
      done
      ;;
    *)
      echo "Unknown logs target: ${target}"
      echo "Use one of: wazuh, iris, shuffle, pagerduty, integrator, flask-openapi-shuffle"
      exit 1
      ;;
  esac
}
# ---------------------------------------------------------------------------
# Top-level dispatch on COMMAND. The target word (if any) is present both as
# $1 and as ARGS[0]; the ARGS=("${ARGS[@]:1}") lines strip it before the
# remaining options reach docker compose.
# NOTE(review): slicing ARGS to an empty array under `set -u` can error on
# older bash (< 4.4) — verify the minimum supported bash version.
# ---------------------------------------------------------------------------
if [[ "${COMMAND}" == "down" ]]; then
  TARGET="${1:-}"
  if [[ -z "${TARGET}" ]]; then
    # Guard rail: a bare "down" would otherwise stop every stack at once.
    echo "Refusing to run 'down' without a target."
    echo "Use one of:"
    echo " ./run-combined-stack.sh down <wazuh|iris|shuffle|pagerduty|integrator|flask-openapi-shuffle>"
    echo " ./run-combined-stack.sh down --all"
    exit 1
  fi
  case "${TARGET}" in
    wazuh|iris|shuffle|pagerduty|integrator|flask-openapi-shuffle)
      ARGS=("${ARGS[@]:1}")  # drop the target word, keep user options
      run_target "${TARGET}"
      ;;
    all|--all)
      ARGS=("${ARGS[@]:1}")
      run_all "down"
      ;;
    *)
      # First word is not a known target: assume it is a compose option
      # (still present in ARGS) and stop everything.
      run_all "down"
      ;;
  esac
elif [[ "${COMMAND}" == "logs" ]]; then
  LOGS_TARGET="${1:-all}"
  case "${LOGS_TARGET}" in
    wazuh|iris|shuffle|pagerduty|integrator|flask-openapi-shuffle)
      ARGS=("${ARGS[@]:1}")
      run_logs_for_target "${LOGS_TARGET}"
      ;;
    all|--all)
      ARGS=("${ARGS[@]:1}")
      run_logs_for_target "all"
      ;;
    *)
      # No explicit target. Refuse follow mode here: run_logs_for_target
      # walks the stacks sequentially, so -f would block on the first one.
      for arg in ${ARGS[@]+"${ARGS[@]}"}; do
        if [[ "${arg}" == "-f" || "${arg}" == "--follow" ]]; then
          echo "For follow mode, specify one target:"
          echo "./run-combined-stack.sh logs <wazuh|iris|shuffle|pagerduty|integrator|flask-openapi-shuffle> -f"
          exit 1
        fi
      done
      run_logs_for_target "all"
      ;;
  esac
elif [[ "${COMMAND}" == "up" ]]; then
  TARGET="${1:-all}"
  case "${TARGET}" in
    wazuh|iris|shuffle|pagerduty|integrator|flask-openapi-shuffle)
      ARGS=("${ARGS[@]:1}")
      run_target "${TARGET}"
      ;;
    all|--all)
      ARGS=("${ARGS[@]:1}")
      # Detect an explicit detach flag; without one, start detached anyway
      # and then follow logs from every stack (foreground-like behaviour).
      HAS_DETACH="false"
      for arg in ${ARGS[@]+"${ARGS[@]}"}; do
        if [[ "${arg}" == "-d" || "${arg}" == "--detach" ]]; then
          HAS_DETACH="true"
          break
        fi
      done
      if [[ "${HAS_DETACH}" == "true" ]]; then
        run_all "up"
      else
        ARGS+=("-d")
        run_all "up"
        follow_all_logs
      fi
      ;;
    *)
      # First word is not a known target: assume it is a compose option
      # (still present in ARGS) and bring up all stacks.
      run_all "up"
      ;;
  esac
elif [[ "${COMMAND}" == "recreate" ]]; then
  TARGET="${1:-all}"
  # "recreate" is sugar for `up --force-recreate -d`.
  COMMAND="up"
  case "${TARGET}" in
    wazuh|iris|shuffle|pagerduty|integrator|flask-openapi-shuffle)
      # NOTE(review): this discards any extra user-supplied options.
      ARGS=("--force-recreate" "-d")
      run_target "${TARGET}"
      ;;
    all|--all|*)
      # "all|--all|" is redundant before "*"; kept for readability.
      ARGS=("--force-recreate" "-d")
      run_all "up"
      ;;
  esac
elif [[ "${COMMAND}" == "cleanup" ]]; then
  WITH_VOLUMES="false"
  for arg in ${ARGS[@]+"${ARGS[@]}"}; do
    case "${arg}" in
      --with-volumes|-v)
        WITH_VOLUMES="true"
        ;;
      *)
        # Unknown cleanup flags are silently ignored.
        ;;
    esac
  done
  run_cleanup "${WITH_VOLUMES}"
elif [[ "${COMMAND}" == "dedup" ]]; then
  dedup_index_patterns
else
  # Unrecognized command: fall back to starting everything with ARGS as-is.
  run_all "up"
fi
|