Nav apraksta

marshables.py 95KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585
  1. # IRIS Source Code
  2. # Copyright (C) 2021 - Airbus CyberSecurity (SAS)
  3. # ir@cyberactionlab.net
  4. #
  5. # This program is free software; you can redistribute it and/or
  6. # modify it under the terms of the GNU Lesser General Public
  7. # License as published by the Free Software Foundation; either
  8. # version 3 of the License, or (at your option) any later version.
  9. #
  10. # This program is distributed in the hope that it will be useful,
  11. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  13. # Lesser General Public License for more details.
  14. #
  15. # You should have received a copy of the GNU Lesser General Public License
  16. # along with this program; if not, write to the Free Software Foundation,
  17. # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  18. import datetime
  19. import dateutil.parser
  20. import marshmallow
  21. import os
  22. import pyminizip
  23. import random
  24. import re
  25. import shutil
  26. import string
  27. import tempfile
  28. from flask_login import current_user
  29. from marshmallow import ValidationError
  30. from marshmallow import EXCLUDE
  31. from marshmallow import fields
  32. from marshmallow import post_load
  33. from marshmallow import pre_load
  34. from marshmallow.validate import Length
  35. from marshmallow_sqlalchemy import auto_field
  36. from pathlib import Path
  37. from sqlalchemy import func
  38. from sqlalchemy.orm import aliased
  39. from typing import Any
  40. from typing import Dict
  41. from typing import List
  42. from typing import Optional
  43. from typing import Tuple
  44. from typing import Union
  45. from werkzeug.datastructures import FileStorage
  46. from app import app
  47. from app import db
  48. from app import ma
  49. from app.datamgmt.datastore.datastore_db import datastore_get_standard_path
  50. from app.datamgmt.manage.manage_attribute_db import merge_custom_attributes
  51. from app.datamgmt.manage.manage_tags_db import add_db_tag
  52. from app.datamgmt.case.case_iocs_db import get_ioc_links
  53. from app.iris_engine.access_control.utils import ac_mask_from_val_list
  54. from app.models.models import AnalysisStatus
  55. from app.models.models import CaseClassification
  56. from app.models.models import SavedFilter
  57. from app.models.models import DataStorePath
  58. from app.models.models import IrisModuleHook
  59. from app.models.models import Tags
  60. from app.models.models import ReviewStatus
  61. from app.models.models import EvidenceTypes
  62. from app.models.models import CaseStatus
  63. from app.models.models import NoteDirectory
  64. from app.models.models import NoteRevisions
  65. from app.models.models import AssetsType
  66. from app.models.models import CaseAssets
  67. from app.models.models import CaseReceivedFile
  68. from app.models.models import CaseTasks
  69. from app.models.cases import Cases
  70. from app.models.cases import CasesEvent
  71. from app.models.models import Client
  72. from app.models.models import Comments
  73. from app.models.models import Contact
  74. from app.models.models import DataStoreFile
  75. from app.models.models import EventCategory
  76. from app.models.models import GlobalTasks
  77. from app.models.models import Ioc
  78. from app.models.models import IocType
  79. from app.models.models import IrisModule
  80. from app.models.models import Notes
  81. from app.models.models import NotesGroup
  82. from app.models.models import ServerSettings
  83. from app.models.models import TaskStatus
  84. from app.models.models import Tlp
  85. from app.models.alerts import Alert
  86. from app.models.alerts import Severity
  87. from app.models.alerts import AlertStatus
  88. from app.models.alerts import AlertResolutionStatus
  89. from app.models.authorization import Group
  90. from app.models.authorization import Organisation
  91. from app.models.authorization import User
  92. from app.models.cases import CaseState
  93. from app.models.cases import CaseProtagonist
  94. from app.util import file_sha256sum
  95. from app.util import str_to_bool
  96. from app.util import assert_type_mml
  97. from app.util import stream_sha256sum
# Icon uploads are restricted to simple image formats (see allowed_file_icon).
ALLOWED_EXTENSIONS = {'png', 'svg'}

# Upper bounds of PostgreSQL INTEGER / BIGINT columns, for input validation.
POSTGRES_INT_MAX = 2147483647
POSTGRES_BIGINT_MAX = 9223372036854775807

# Module-level logger shared with the Flask application.
log = app.logger
  102. def allowed_file_icon(filename: str):
  103. """
  104. Checks if the file extension of the given filename is allowed.
  105. Args:
  106. filename (str): The name of the file to check.
  107. Returns:
  108. bool: True if the filename has an extension and the extension is in the ALLOWED_EXTENSIONS set, False otherwise.
  109. """
  110. return '.' in filename and \
  111. filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
  112. def get_random_string(length: int) -> str:
  113. """
  114. Generates a random string of lowercase letters.
  115. Args:
  116. length (int): The length of the string to generate.
  117. Returns:
  118. str: A random string of lowercase letters with the given length.
  119. """
  120. letters = string.ascii_lowercase
  121. result_str = ''.join(random.choice(letters) for i in range(length))
  122. return result_str
  123. def store_icon(file):
  124. """Stores an icon file.
  125. This function stores an icon file in the asset store path and creates a symlink to it in the asset show path.
  126. The file is saved with a randomly generated filename. If the file is not valid or its filetype is not allowed,
  127. the function returns an error message.
  128. Args:
  129. file: The icon file to store.
  130. Returns:
  131. A tuple containing the filename of the stored file (or None if an error occurred) and a message.
  132. """
  133. if not file:
  134. return None, 'Icon file is not valid'
  135. if not allowed_file_icon(file.filename):
  136. return None, 'Icon filetype is not allowed'
  137. filename = get_random_string(18)
  138. try:
  139. store_fullpath = os.path.join(app.config['ASSET_STORE_PATH'], filename)
  140. show_fullpath = os.path.join(app.config['APP_PATH'], 'app',
  141. app.config['ASSET_SHOW_PATH'].strip(os.path.sep),
  142. filename)
  143. file.save(store_fullpath)
  144. os.symlink(store_fullpath, show_fullpath)
  145. except Exception as e:
  146. return None, f"Unable to add icon {e}"
  147. return filename, 'Saved'
class CaseNoteDirectorySchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing NoteDirectory objects.

    Auto-generated from the NoteDirectory model (foreign keys included,
    unknown fields ignored). Provides parent-ID validation against the
    owning case and a pre-load type check of the incoming payload.
    """

    class Meta:
        model = NoteDirectory
        load_instance = True
        include_fk = True
        unknown = EXCLUDE

    def verify_parent_id(self, parent_id, case_id, current_id=None):
        """Validate that parent_id is a legal parent directory within case_id.

        Args:
            parent_id: Candidate parent directory ID.
            case_id: ID of the case the directory belongs to.
            current_id: ID of the directory being edited, if any.

        Returns:
            The validated parent_id.

        Raises:
            ValidationError: If the parent does not exist in the case, is the
                directory itself, or is a direct child of the directory.
        """
        # A directory cannot be its own parent.
        if current_id is not None and int(parent_id) == int(current_id):
            raise marshmallow.exceptions.ValidationError("Invalid parent id for the directory",
                                                         field_name="parent_id")

        # The parent must exist and belong to the same case.
        directory = NoteDirectory.query.filter(
            NoteDirectory.id == parent_id,
            NoteDirectory.case_id == case_id
        ).first()
        if directory:
            # Reject a two-node cycle: the candidate parent's own parent is
            # the directory being edited.
            # NOTE(review): deeper cycles are not detected here — confirm
            # that is handled elsewhere if arbitrary nesting is allowed.
            if current_id is not None and directory.parent_id == int(current_id):
                raise marshmallow.exceptions.ValidationError("Invalid parent id for the directory",
                                                             field_name="parent_id")
            return parent_id

        # Parent not found within this case.
        raise marshmallow.exceptions.ValidationError("Invalid parent id for the directory",
                                                     field_name="parent_id")

    @pre_load
    def verify_directory_name(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Type-check the incoming directory payload before load.

        Ensures 'name' (if present) is a string and 'parent_id' (if
        present) is an integer. Despite the method name, no uniqueness
        check is performed here — only type validation.

        Args:
            data: The raw payload to validate.
            kwargs: Additional keyword arguments supplied by marshmallow.

        Returns:
            The payload, unchanged.

        Raises:
            ValidationError: If a field has the wrong type.
        """
        assert_type_mml(input_var=data.get('name'),
                        field_name="name",
                        type=str,
                        allow_none=True)

        assert_type_mml(input_var=data.get('parent_id'),
                        field_name="parent_id",
                        type=int,
                        allow_none=True)

        return data
  196. class UserSchema(ma.SQLAlchemyAutoSchema):
  197. """Schema for serializing and deserializing User objects.
  198. This schema defines the fields to include when serializing and deserializing User objects.
  199. It includes fields for the user's name, login, email, password, admin status, CSRF token, ID, primary organization ID,
  200. and service account status. It also includes methods for verifying the username, email, and password.
  201. """
  202. user_roles_str: List[str] = fields.List(fields.String, required=False)
  203. user_name: str = auto_field('name', required=True, validate=Length(min=2))
  204. user_login: str = auto_field('user', required=True, validate=Length(min=2))
  205. user_email: str = auto_field('email', required=True, validate=Length(min=2))
  206. user_password: Optional[str] = auto_field('password', required=False)
  207. user_isadmin: bool = fields.Boolean(required=True)
  208. user_id: Optional[int] = fields.Integer(required=False)
  209. user_primary_organisation_id: Optional[int] = fields.Integer(required=False)
  210. user_is_service_account: Optional[bool] = auto_field('is_service_account', required=False)
  211. class Meta:
  212. model = User
  213. load_instance = True
  214. include_fk = True
  215. exclude = ['api_key', 'password', 'ctx_case', 'ctx_human_case', 'user', 'name', 'email', 'is_service_account']
  216. unknown = EXCLUDE
  217. @pre_load()
  218. def verify_username(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  219. """Verifies that the username is not already taken.
  220. This method verifies that the specified username is not already taken by another user. If the username is already
  221. taken, it raises a validation error.
  222. Args:
  223. data: The data to verify.
  224. kwargs: Additional keyword arguments.
  225. Returns:
  226. The verified data.
  227. Raises:
  228. ValidationError: If the username is already taken.
  229. """
  230. user = data.get('user_login')
  231. user_id = data.get('user_id')
  232. assert_type_mml(input_var=user_id,
  233. field_name="user_id",
  234. type=int,
  235. allow_none=True)
  236. assert_type_mml(input_var=user,
  237. field_name="user_login",
  238. type=str,
  239. allow_none=True)
  240. luser = User.query.filter(
  241. User.user == user
  242. ).all()
  243. for usr in luser:
  244. if usr.id != user_id:
  245. raise marshmallow.exceptions.ValidationError('User name already taken',
  246. field_name="user_login")
  247. return data
  248. @pre_load()
  249. def verify_email(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  250. """Verifies that the email is not already taken.
  251. This method verifies that the specified email is not already taken by another user. If the email is already
  252. taken, it raises a validation error.
  253. Args:
  254. data: The data to verify.
  255. kwargs: Additional keyword arguments.
  256. Returns:
  257. The verified data.
  258. Raises:
  259. ValidationError: If the email is already taken.
  260. """
  261. email = data.get('user_email')
  262. user_id = data.get('user_id')
  263. assert_type_mml(input_var=user_id,
  264. field_name="user_id",
  265. type=int,
  266. allow_none=True)
  267. assert_type_mml(input_var=email,
  268. field_name="user_email",
  269. type=str,
  270. allow_none=True)
  271. luser = User.query.filter(
  272. User.email == email
  273. ).all()
  274. for usr in luser:
  275. if usr.id != user_id:
  276. raise marshmallow.exceptions.ValidationError('User email already taken',
  277. field_name="user_email")
  278. return data
  279. @pre_load()
  280. def verify_password(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  281. """Verifies that the password meets the server's password policy.
  282. This method verifies that the specified password meets the server's password policy. If the password does not
  283. meet the policy, it raises a validation error.
  284. Args:
  285. data: The data to verify.
  286. kwargs: Additional keyword arguments.
  287. Returns:
  288. The verified data.
  289. Raises:
  290. ValidationError: If the password does not meet the server's password policy.
  291. """
  292. server_settings = ServerSettings.query.first()
  293. password = data.get('user_password')
  294. if (password == '' or password is None) and str_to_bool(data.get('user_is_service_account')) is True:
  295. return data
  296. if (password == '' or password is None) and data.get('user_id') != 0:
  297. # Update
  298. data.pop('user_password') if 'user_password' in data else None
  299. else:
  300. password_error = ""
  301. if len(password) < server_settings.password_policy_min_length:
  302. password_error += f"Password must be longer than {server_settings.password_policy_min_length} characters. "
  303. if server_settings.password_policy_upper_case:
  304. if not any(char.isupper() for char in password):
  305. password_error += "Password must contain uppercase char. "
  306. if server_settings.password_policy_lower_case:
  307. if not any(char.islower() for char in password):
  308. password_error += "Password must contain lowercase char. "
  309. if server_settings.password_policy_digit:
  310. if not any(char.isdigit() for char in password):
  311. password_error += "Password must contain digit. "
  312. if len(server_settings.password_policy_special_chars) > 0:
  313. if not any(char in server_settings.password_policy_special_chars for char in password):
  314. password_error += f"Password must contain a special char [{server_settings.password_policy_special_chars}]. "
  315. if len(password_error) > 0:
  316. raise marshmallow.exceptions.ValidationError(password_error,
  317. field_name="user_password")
  318. return data
class CommentSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing Comments objects.

    Nests the authoring user, restricted to a safe subset of fields
    (id, name, login, email).
    """
    # Only expose non-sensitive author fields in nested output.
    user = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])

    class Meta:
        model = Comments
        load_instance = True
        include_fk = True
        unknown = EXCLUDE
class CaseNoteRevisionSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing NoteRevisions objects."""
    # Display name of the revision's author; not an auto-mapped model column.
    user_name = fields.String()

    class Meta:
        model = NoteRevisions
        load_instance = True
        include_fk = True
        unknown = EXCLUDE
class CaseNoteSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing Notes objects.

    Auto-generated from the Notes model; nests the note's comments and
    its containing directory. Also provides directory-ID validation and
    a post-load merge of custom attributes.
    """
    comments = fields.Nested('CommentSchema', many=True)
    directory = fields.Nested('CaseNoteDirectorySchema', many=False)

    class Meta:
        model = Notes
        load_instance = True
        include_fk = True
        unknown = EXCLUDE

    def verify_directory_id(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Verify that data['directory_id'] belongs to kwargs['caseid'].

        NOTE(review): not decorated with @pre_load — presumably invoked
        manually by callers; confirm before relying on automatic
        validation during load.

        Args:
            data: The payload to verify.
            kwargs: Additional keyword arguments, including the case ID
                under the 'caseid' key.

        Returns:
            The payload, unchanged, when the directory exists in the case.

        Raises:
            ValidationError: If the directory ID is invalid for the case.
        """
        assert_type_mml(input_var=data.get('directory_id'),
                        field_name="directory_id",
                        type=int)

        directory = NoteDirectory.query.filter(
            NoteDirectory.id == data.get('directory_id'),
            NoteDirectory.case_id == kwargs.get('caseid')
        ).first()
        if directory:
            return data

        raise marshmallow.exceptions.ValidationError("Invalid directory id for the case",
                                                     field_name="directory_id")

    @post_load
    def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Merge supplied custom attributes with the note's existing ones.

        When no custom attributes are supplied, the data passes through
        unchanged.

        Args:
            data: The loaded data to merge.
            kwargs: Additional keyword arguments supplied by marshmallow.

        Returns:
            The data with 'custom_attributes' merged, or unchanged.
        """
        new_attr = data.get('custom_attributes')
        if new_attr is not None:
            assert_type_mml(input_var=data.get('note_id'),
                            field_name="note_id",
                            type=int,
                            allow_none=True)

            data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('note_id'), 'note')

        return data
class CaseAddNoteSchema(ma.Schema):
    """Schema for creating a case note (plain marshmallow schema, no model binding).

    Carries the note ID, title, content and custom attributes. Also
    provides directory-ID validation (duplicated from CaseNoteSchema in
    the original file) and a post-load merge of custom attributes.
    """
    note_id: int = fields.Integer(required=False)
    note_title: str = fields.String(required=True, validate=Length(min=1, max=154), allow_none=False)
    note_content: str = fields.String(required=False)
    custom_attributes: Dict[str, Any] = fields.Dict(required=False)

    def verify_directory_id(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Verify that data['directory_id'] belongs to kwargs['caseid'].

        NOTE(review): not decorated with @pre_load — presumably invoked
        manually by callers; confirm before relying on automatic
        validation during load.

        Args:
            data: The payload to verify.
            kwargs: Additional keyword arguments, including the case ID
                under the 'caseid' key.

        Returns:
            The payload, unchanged, when the directory exists in the case.

        Raises:
            ValidationError: If the directory ID is invalid for the case.
        """
        assert_type_mml(input_var=data.get('directory_id'),
                        field_name="directory_id",
                        type=int)

        directory = NoteDirectory.query.filter(
            NoteDirectory.id == data.get('directory_id'),
            NoteDirectory.case_id == kwargs.get('caseid')
        ).first()
        if directory:
            return data

        raise marshmallow.exceptions.ValidationError("Invalid directory id for the case",
                                                     field_name="directory_id")

    @post_load
    def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Merge supplied custom attributes with the note's existing ones.

        When no custom attributes are supplied, the data passes through
        unchanged.

        Args:
            data: The loaded data to merge.
            kwargs: Additional keyword arguments supplied by marshmallow.

        Returns:
            The data with 'custom_attributes' merged, or unchanged.
        """
        new_attr = data.get('custom_attributes')
        if new_attr is not None:
            assert_type_mml(input_var=data.get('note_id'),
                            field_name="note_id",
                            type=int,
                            allow_none=True)

            data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('note_id'), 'note')

        return data
class CaseGroupNoteSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing NotesGroup objects.

    Auto-generated from the NotesGroup model; unknown fields are ignored.
    """

    class Meta:
        model = NotesGroup
        load_instance = True
        unknown = EXCLUDE
class AssetTypeSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing AssetsType objects.

    Exposes the asset name, description and the icons for compromised /
    not-compromised states. Enforces name uniqueness on load and offers
    a helper to store uploaded icon files.
    """
    asset_name: str = auto_field('asset_name', required=True, validate=Length(min=2), allow_none=False)
    asset_description: str = auto_field('asset_description', required=True, validate=Length(min=2), allow_none=False)
    asset_icon_compromised: str = auto_field('asset_icon_compromised')
    asset_icon_not_compromised: str = auto_field('asset_icon_not_compromised')

    class Meta:
        model = AssetsType
        load_instance = True
        unknown = EXCLUDE

    @post_load
    def verify_unique(self, data: Any, **kwargs: Any) -> Any:
        """Verify that the asset type name is unique (case-insensitive).

        Runs post-load, so `data` is a loaded AssetsType instance
        (Meta.load_instance = True), accessed by attribute.

        Args:
            data: The loaded AssetsType instance to verify.
            kwargs: Additional keyword arguments supplied by marshmallow.

        Returns:
            The verified instance.

        Raises:
            ValidationError: If another asset type already uses the name.
        """
        assert_type_mml(input_var=data.asset_name,
                        field_name="asset_name",
                        type=str)

        assert_type_mml(input_var=data.asset_id,
                        field_name="asset_id",
                        type=int,
                        allow_none=True)

        # Look for any *other* asset type with the same name, ignoring case.
        client = AssetsType.query.filter(
            func.lower(AssetsType.asset_name) == func.lower(data.asset_name),
            AssetsType.asset_id != data.asset_id
        ).first()
        if client:
            raise marshmallow.exceptions.ValidationError(
                "Asset type name already exists",
                field_name="asset_name"
            )

        return data

    def load_store_icon(self, file_storage: Any, field_type: str) -> Optional[str]:
        """Store an uploaded asset icon and record its filename.

        Args:
            file_storage: The file storage containing the asset icon.
            field_type: Name of the icon field being set
                (e.g. 'asset_icon_compromised').

        Returns:
            The stored filename, or None when no file was supplied.

        Raises:
            ValidationError: If the file is invalid or its type disallowed.
        """
        if not file_storage.filename:
            return None

        fpath, message = store_icon(file_storage)

        if fpath is None:
            raise marshmallow.exceptions.ValidationError(
                message,
                field_name=field_type
            )

        # NOTE(review): this sets the attribute on the schema instance
        # (self), not on the loaded AssetsType model — confirm callers
        # read the value back from the schema as intended.
        setattr(self, field_type, fpath)

        return fpath
class CaseAssetsSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing CaseAssets objects.

    Declares the asset name, IOC links, enrichment data, nested asset type,
    linked alerts and analysis status. Incoming data is checked against the
    referenced asset type and analysis status before load, and custom
    attributes are merged after load.
    """
    asset_name: str = auto_field('asset_name', required=True, allow_none=False)
    ioc_links: List[int] = fields.List(fields.Integer, required=False)
    asset_enrichment: str = auto_field('asset_enrichment', required=False)
    asset_type: AssetTypeSchema = ma.Nested(AssetTypeSchema, required=False)
    alerts = fields.Nested('AlertSchema', many=True, exclude=['assets'])
    analysis_status = fields.Nested('AnalysisStatusSchema', required=False)

    class Meta:
        model = CaseAssets
        include_fk = True
        load_instance = True
        unknown = EXCLUDE  # silently drop unexpected input keys

    @staticmethod
    def is_unique_for_customer(customer_id, request_data):
        """Check if the asset is unique for the customer.

        Looks for another asset with the same name (case-insensitive) and the
        same asset type in any case belonging to the customer, excluding the
        asset referenced by request_data itself.

        Args:
            customer_id: ID of the customer (client) to scope the search to.
            request_data: Dict holding at least 'asset_name'; optionally
                'asset_type_id' and 'asset_id'.

        Returns:
            The matching asset row (asset_id only) if a duplicate exists,
            otherwise None.

        Raises:
            ValidationError: If 'asset_name' is missing from request_data.
        """
        if request_data.get('asset_name') is None:
            raise marshmallow.exceptions.ValidationError("Asset name is required",
                                                         field_name="asset_name")

        # Aliases keep the join explicit and self-contained
        case_alias = aliased(Cases)
        asset_alias = aliased(CaseAssets)

        asset = db.session.query(
            asset_alias.asset_id
        ).join(
            case_alias, asset_alias.case_id == case_alias.case_id
        ).filter(
            func.lower(asset_alias.asset_name) == request_data.get('asset_name').lower(),
            asset_alias.asset_type_id == request_data.get('asset_type_id'),
            asset_alias.asset_id != request_data.get('asset_id'),
            case_alias.client_id == customer_id
        ).first()

        if asset is not None:
            return asset

        return None

    @pre_load
    def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Verifies the asset type ID, analysis status ID and tags.

        Args:
            data: The raw input data to verify.
            kwargs: Additional keyword arguments passed by marshmallow.

        Returns:
            The verified data.

        Raises:
            ValidationError: If the asset type or analysis status ID is
                invalid, or a tag is not a string.
        """
        assert_type_mml(input_var=data.get('asset_type_id'),
                        field_name="asset_type_id",
                        type=int)

        asset_type = AssetsType.query.filter(AssetsType.asset_id == data.get('asset_type_id')).count()
        if not asset_type:
            raise marshmallow.exceptions.ValidationError("Invalid asset type ID",
                                                         field_name="asset_type_id")

        assert_type_mml(input_var=data.get('analysis_status_id'),
                        field_name="analysis_status_id", type=int,
                        allow_none=True)

        # Analysis status is optional — only validate when provided
        if data.get('analysis_status_id'):
            status = AnalysisStatus.query.filter(AnalysisStatus.id == data.get('analysis_status_id')).count()
            if not status:
                raise marshmallow.exceptions.ValidationError("Invalid analysis status ID",
                                                             field_name="analysis_status_id")

        # Tags arrive as a comma-separated string; each tag is registered in DB
        if data.get('asset_tags'):
            for tag in data.get('asset_tags').split(','):
                if not isinstance(tag, str):
                    raise marshmallow.exceptions.ValidationError("All items in list must be strings",
                                                                 field_name="asset_tags")
                add_db_tag(tag.strip())

        return data

    @post_load
    def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Merges custom attributes with the existing ones for the asset.

        If no custom attributes are present in the data, it is returned
        unchanged.

        Args:
            data: The loaded data to merge.
            kwargs: Additional keyword arguments passed by marshmallow.

        Returns:
            The merged data.
        """
        new_attr = data.get('custom_attributes')
        if new_attr is not None:
            assert_type_mml(input_var=data.get('asset_id'),
                            field_name="asset_id", type=int,
                            allow_none=True)
            data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('asset_id'), 'asset')

        return data
  613. class CaseTemplateSchema(ma.Schema):
  614. """Schema for serializing and deserializing CaseTemplate objects.
  615. This schema defines the fields to include when serializing and deserializing CaseTemplate objects.
  616. It includes fields for the template ID, the user ID of the user who created the template, the creation and update
  617. timestamps, the name, display name, description, author, title prefix, summary, tags, and classification of the
  618. template. It also includes fields for the tasks and note groups associated with the template, and methods for
  619. validating the format of the tasks and note groups.
  620. """
  621. id: int = fields.Integer(dump_only=True)
  622. created_by_user_id: int = fields.Integer(required=True)
  623. created_at: datetime = fields.DateTime(dump_only=True)
  624. updated_at: datetime = fields.DateTime(dump_only=True)
  625. name: str = fields.String(required=True)
  626. display_name: Optional[str] = fields.String(allow_none=True, missing="")
  627. description: Optional[str] = fields.String(allow_none=True, missing="")
  628. author: Optional[str] = fields.String(allow_none=True, validate=Length(max=128), missing="")
  629. title_prefix: Optional[str] = fields.String(allow_none=True, validate=Length(max=32), missing="")
  630. summary: Optional[str] = fields.String(allow_none=True, missing="")
  631. tags: Optional[List[str]] = fields.List(fields.String(), allow_none=True, missing=[])
  632. classification: Optional[str] = fields.String(allow_none=True, missing="")
  633. note_directories: Optional[List[Dict[str, Union[str, List[Dict[str, str]]]]]] = fields.List(fields.Dict(),
  634. allow_none=True,
  635. missing=[])
  636. @staticmethod
  637. def validate_string_or_list(value: Union[str, List[str]]) -> Union[str, List[str]]:
  638. """Validates that a value is a string or a list of strings.
  639. This method validates that a value is either a string or a list of strings. If the value is a list, it also
  640. validates that all items in the list are strings.
  641. Args:
  642. value: The value to validate.
  643. Returns:
  644. The validated value.
  645. Raises:
  646. ValidationError: If the value is not a string or a list of strings.
  647. """
  648. if not isinstance(value, (str, list)):
  649. raise ValidationError('Value must be a string or a list of strings')
  650. if isinstance(value, list):
  651. for item in value:
  652. if not isinstance(item, str):
  653. raise ValidationError('All items in list must be strings')
  654. return value
  655. @staticmethod
  656. def validate_string_or_list_of_dict(value: Union[str, List[Dict[str, str]]]) -> Union[str, List[Dict[str, str]]]:
  657. """Validates that a value is a string or a list of dictionaries with string values.
  658. This method validates that a value is either a string or a list of dictionaries with string values. If the value
  659. is a list, it also validates that all items in the list are dictionaries with string values.
  660. Args:
  661. value: The value to validate.
  662. Returns:
  663. The validated value.
  664. Raises:
  665. ValidationError: If the value is not a string or a list of dictionaries with string values.
  666. """
  667. if not isinstance(value, (str, list)):
  668. raise ValidationError('Value must be a string or a list of strings')
  669. if isinstance(value, list):
  670. for item in value:
  671. if not isinstance(item, dict):
  672. raise ValidationError('All items in list must be dict')
  673. for ivalue in item.values():
  674. if not isinstance(ivalue, str):
  675. raise ValidationError('All items in dict must be str')
  676. return value
  677. tasks: Optional[List[Dict[str, Union[str, List[str]]]]] = fields.List(
  678. fields.Dict(keys=fields.Str(), values=fields.Raw(validate=[validate_string_or_list])),
  679. allow_none=True,
  680. missing=[]
  681. )
  682. class IocTypeSchema(ma.SQLAlchemyAutoSchema):
  683. """Schema for serializing and deserializing IocType objects.
  684. This schema defines the fields to include when serializing and deserializing IocType objects.
  685. It includes fields for the IOC type name, description, taxonomy, validation regex, and validation expectation.
  686. It also includes a method for verifying that the IOC type name is unique.
  687. """
  688. type_name: str = auto_field('type_name', required=True, validate=Length(min=2), allow_none=False)
  689. type_description: str = auto_field('type_description', required=True, validate=Length(min=2), allow_none=False)
  690. type_taxonomy: Optional[str] = auto_field('type_taxonomy')
  691. type_validation_regex: Optional[str] = auto_field('type_validation_regex')
  692. type_validation_expect: Optional[str] = auto_field('type_validation_expect')
  693. class Meta:
  694. model = IocType
  695. load_instance = True
  696. unknown = EXCLUDE
  697. @post_load
  698. def verify_unique(self, data: IocType, **kwargs: Any) -> IocType:
  699. """Verifies that the IOC type name is unique.
  700. This method verifies that the IOC type name specified in the data is unique.
  701. If the name is not unique, it raises a validation error.
  702. Args:
  703. data: The data to verify.
  704. kwargs: Additional keyword arguments.
  705. Returns:
  706. The verified data.
  707. Raises:
  708. ValidationError: If the IOC type name is not unique.
  709. """
  710. client = IocType.query.filter(
  711. func.lower(IocType.type_name) == func.lower(data.type_name),
  712. IocType.type_id != data.type_id
  713. ).first()
  714. if client:
  715. raise marshmallow.exceptions.ValidationError(
  716. "IOC type name already exists",
  717. field_name="type_name"
  718. )
  719. return data
  720. class TlpSchema(ma.SQLAlchemyAutoSchema):
  721. class Meta:
  722. model = Tlp
  723. load_instance = True
  724. include_fk = True
  725. unknown = EXCLUDE
# TODO try to remove IocSchema and replace it by this new schema
class IocSchemaForAPIV2(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing IOC objects (API v2).

    Declares the IOC value, enrichment data, nested IOC type and TLP, plus a
    computed 'link' field listing the cases the IOC is linked to. Incoming
    data is validated against the IOC type's validation regex before load,
    and custom attributes are merged after load.
    """
    ioc_value: str = auto_field('ioc_value', required=True, validate=Length(min=1), allow_none=False)
    ioc_enrichment: Optional[Dict[str, Any]] = auto_field('ioc_enrichment', required=False)
    ioc_type: Optional[IocTypeSchema] = ma.Nested(IocTypeSchema, required=False)
    tlp = ma.Nested(TlpSchema)

    def get_link(self, ioc):
        """Return the IOC's case links as a list of plain dicts (serializer
        for the 'link' Method field below)."""
        ial = get_ioc_links(ioc.ioc_id)
        return [row._asdict() for row in ial]

    link = ma.Method('get_link')

    class Meta:
        model = Ioc
        load_instance = True
        include_fk = True
        unknown = EXCLUDE  # silently drop unexpected input keys

    @pre_load
    def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Verifies the IOC type, value format, TLP and tags.

        The IOC value must match the validation regex of its IOC type, when
        one is defined.

        Args:
            data: The raw input data to verify.
            kwargs: Additional keyword arguments passed by marshmallow.

        Returns:
            The verified data.

        Raises:
            ValidationError: If the IOC type ID is invalid or the IOC value
                does not match the expected format.
        """
        if data.get('ioc_type_id'):
            assert_type_mml(input_var=data.get('ioc_type_id'), field_name="ioc_type_id", type=int)

            ioc_type = IocType.query.filter(IocType.type_id == data.get('ioc_type_id')).first()
            if not ioc_type:
                raise marshmallow.exceptions.ValidationError("Invalid IOC type ID", field_name="ioc_type_id")

            if ioc_type.type_validation_regex:
                # Whole-value, case-insensitive match against the type's regex
                if not re.fullmatch(ioc_type.type_validation_regex, data.get('ioc_value'), re.IGNORECASE):
                    error = f"The input doesn\'t match the expected format " \
                            f"(expected: {ioc_type.type_validation_expect or ioc_type.type_validation_regex})"
                    # NOTE(review): field name "ioc_ioc_value" looks like a typo
                    # for "ioc_value" — confirm against API consumers
                    raise marshmallow.exceptions.ValidationError(error, field_name="ioc_ioc_value")

        if data.get('ioc_tlp_id'):
            assert_type_mml(input_var=data.get('ioc_tlp_id'), field_name="ioc_tlp_id", type=int,
                            max_val=POSTGRES_INT_MAX)
            # NOTE(review): result is unused — presumably intended to validate
            # that the TLP ID exists; an invalid ID is currently not rejected
            Tlp.query.filter(Tlp.tlp_id == data.get('ioc_tlp_id')).count()

        # Tags arrive as a comma-separated string; each tag is registered in DB
        if data.get('ioc_tags'):
            for tag in data.get('ioc_tags').split(','):
                if not isinstance(tag, str):
                    raise marshmallow.exceptions.ValidationError("All items in list must be strings",
                                                                 field_name="ioc_tags")
                add_db_tag(tag.strip())

        return data

    @post_load
    def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Merges custom attributes with the IOC data.

        If no custom attributes are present in the data, it is returned
        unchanged.

        Args:
            data: The loaded data to merge.
            kwargs: Additional keyword arguments passed by marshmallow.

        Returns:
            The merged data.
        """
        new_attr = data.get('custom_attributes')
        if new_attr is not None:
            assert_type_mml(input_var=data.get('ioc_id'),
                            field_name="ioc_id",
                            type=int,
                            allow_none=True)
            data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('ioc_id'), 'ioc')

        return data
class IocSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing IOC objects.

    Declares the IOC value, enrichment data and nested IOC type. Incoming
    data is validated against the IOC type's validation regex before load,
    and custom attributes are merged after load.
    (See the TODO above IocSchemaForAPIV2: this schema is slated for
    replacement by that one.)
    """
    ioc_value: str = auto_field('ioc_value', required=True, validate=Length(min=1), allow_none=False)
    ioc_enrichment: Optional[Dict[str, Any]] = auto_field('ioc_enrichment', required=False)
    ioc_type: Optional[IocTypeSchema] = ma.Nested(IocTypeSchema, required=False)

    class Meta:
        model = Ioc
        load_instance = True
        include_fk = True
        unknown = EXCLUDE  # silently drop unexpected input keys

    @pre_load
    def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Verifies the IOC type, value format, TLP and tags.

        The IOC value must match the validation regex of its IOC type, when
        one is defined.

        Args:
            data: The raw input data to verify.
            kwargs: Additional keyword arguments passed by marshmallow.

        Returns:
            The verified data.

        Raises:
            ValidationError: If the IOC type ID is invalid or the IOC value
                does not match the expected format.
        """
        if data.get('ioc_type_id'):
            assert_type_mml(input_var=data.get('ioc_type_id'), field_name='ioc_type_id', type=int)

            ioc_type = IocType.query.filter(IocType.type_id == data.get('ioc_type_id')).first()
            if not ioc_type:
                raise marshmallow.exceptions.ValidationError('Invalid IOC type ID', field_name='ioc_type_id')

            if ioc_type.type_validation_regex:
                # Whole-value, case-insensitive match against the type's regex
                if not re.fullmatch(ioc_type.type_validation_regex, data.get('ioc_value'), re.IGNORECASE):
                    error = f'The input doesn\'t match the expected format ' \
                            f'(expected: {ioc_type.type_validation_expect or ioc_type.type_validation_regex})'
                    # NOTE(review): field name "ioc_ioc_value" looks like a typo
                    # for "ioc_value" — confirm against API consumers
                    raise marshmallow.exceptions.ValidationError(error, field_name="ioc_ioc_value")

        if data.get('ioc_tlp_id'):
            assert_type_mml(input_var=data.get('ioc_tlp_id'), field_name='ioc_tlp_id', type=int,
                            max_val=POSTGRES_INT_MAX)
            # NOTE(review): result is unused — presumably intended to validate
            # that the TLP ID exists; an invalid ID is currently not rejected
            Tlp.query.filter(Tlp.tlp_id == data.get('ioc_tlp_id')).count()

        # Tags arrive as a comma-separated string; each tag is registered in DB
        if data.get('ioc_tags'):
            for tag in data.get('ioc_tags').split(','):
                if not isinstance(tag, str):
                    raise marshmallow.exceptions.ValidationError('All items in list must be strings',
                                                                 field_name='ioc_tags')
                add_db_tag(tag.strip())

        return data

    @post_load
    def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
        """Merges custom attributes with the IOC data.

        If no custom attributes are present in the data, it is returned
        unchanged.

        Args:
            data: The loaded data to merge.
            kwargs: Additional keyword arguments passed by marshmallow.

        Returns:
            The merged data.
        """
        new_attr = data.get('custom_attributes')
        if new_attr is not None:
            assert_type_mml(input_var=data.get('ioc_id'),
                            field_name="ioc_id",
                            type=int,
                            allow_none=True)
            data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('ioc_id'), 'ioc')

        return data
  870. class UserFullSchema(ma.SQLAlchemyAutoSchema):
  871. """
  872. Schema for serializing and deserializing User objects.
  873. This schema defines the fields to include when serializing and deserializing User objects.
  874. It includes fields for the user's name, login, email, password, admin status, CSRF token, ID, primary organization ID,
  875. and service account status. It also includes methods for verifying the username, email, and password.
  876. """
  877. class Meta:
  878. model = User
  879. load_instance = True
  880. include_fk = True
  881. exclude = ['password', 'ctx_case', 'ctx_human_case']
  882. unknown = EXCLUDE
  883. class EventSchema(ma.SQLAlchemyAutoSchema):
  884. """Schema for serializing and deserializing Event objects.
  885. This schema defines the fields to include when serializing and deserializing Event objects.
  886. It includes fields for the event ID, event title, assets associated with the event, IOCs associated with the event,
  887. the date and time of the event, the time zone of the event, the category ID of the event, and the modification history
  888. of the event.
  889. """
  890. event_title: str = auto_field('event_title', required=True, validate=Length(min=2), allow_none=False)
  891. event_assets: List[int] = fields.List(fields.Integer, required=True, allow_none=False)
  892. event_iocs: List[int] = fields.List(fields.Integer, required=True, allow_none=False)
  893. event_date: datetime = fields.DateTime("%Y-%m-%dT%H:%M:%S.%f", required=True, allow_none=False)
  894. event_tz: str = fields.String(required=True, allow_none=False)
  895. event_category_id: int = fields.Integer(required=True, allow_none=False)
  896. event_date_wtz: datetime = fields.DateTime("%Y-%m-%dT%H:%M:%S.%f", required=False, allow_none=False)
  897. modification_history: str = auto_field('modification_history', required=False, readonly=True)
  898. event_comments_map: List[int] = fields.List(fields.Integer, required=False, allow_none=True)
  899. event_sync_iocs_assets: bool = fields.Boolean(required=False)
  900. children = fields.Nested('EventSchema', many=True, required=False)
  901. class Meta:
  902. model = CasesEvent
  903. load_instance = True
  904. include_fk = True
  905. unknown = EXCLUDE
  906. def validate_date(self, event_date: str, event_tz: str):
  907. """Validates the date and time of the event.
  908. This method validates the date and time of the event by parsing the date and time string and time zone string
  909. and returning the parsed date and time as datetime objects.
  910. Args:
  911. event_date: The date and time of the event as a string.
  912. event_tz: The time zone of the event as a string.
  913. Returns:
  914. A tuple containing the parsed date and time as datetime objects.
  915. Raises:
  916. ValidationError: If the date and time string or time zone string are invalid.
  917. """
  918. date_time = "{}{}".format(event_date, event_tz)
  919. date_time_wtz = "{}".format(event_date)
  920. try:
  921. self.event_date = dateutil.parser.isoparse(date_time)
  922. self.event_date_wtz = dateutil.parser.isoparse(date_time_wtz)
  923. except Exception:
  924. raise marshmallow.exceptions.ValidationError("Invalid date time", field_name="event_date")
  925. return self.event_date, self.event_date_wtz
  926. @pre_load
  927. def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  928. """Verifies the data for the event.
  929. This method verifies that the data for the event is valid by checking that all required fields are present and
  930. that the specified asset and IOC IDs are valid.
  931. Args:
  932. data: The data to verify.
  933. kwargs: Additional keyword arguments.
  934. Returns:
  935. The verified data.
  936. Raises:
  937. ValidationError: If the data is invalid.
  938. """
  939. if data is None:
  940. raise marshmallow.exceptions.ValidationError("Received empty data")
  941. for field in ['event_title', 'event_date', 'event_tz', 'event_category_id', 'event_assets', 'event_iocs']:
  942. if field not in data:
  943. raise marshmallow.exceptions.ValidationError(f"Missing field {field}", field_name=field)
  944. assert_type_mml(input_var=int(data.get('event_category_id')),
  945. field_name='event_category_id',
  946. type=int)
  947. event_cat = EventCategory.query.filter(EventCategory.id == int(data.get('event_category_id'))).count()
  948. if not event_cat:
  949. raise marshmallow.exceptions.ValidationError("Invalid event category ID", field_name="event_category_id")
  950. assert_type_mml(input_var=data.get('event_assets'),
  951. field_name='event_assets',
  952. type=list)
  953. for asset in data.get('event_assets'):
  954. assert_type_mml(input_var=int(asset),
  955. field_name='event_assets',
  956. type=int)
  957. ast = CaseAssets.query.filter(CaseAssets.asset_id == asset).count()
  958. if not ast:
  959. raise marshmallow.exceptions.ValidationError("Invalid assets ID", field_name="event_assets")
  960. assert_type_mml(input_var=data.get('event_iocs'),
  961. field_name='event_iocs',
  962. type=list)
  963. for ioc in data.get('event_iocs'):
  964. assert_type_mml(input_var=int(ioc),
  965. field_name='event_iocs',
  966. type=int)
  967. ast = Ioc.query.filter(Ioc.ioc_id == ioc).count()
  968. if not ast:
  969. raise marshmallow.exceptions.ValidationError("Invalid IOC ID", field_name="event_assets")
  970. if data.get('event_color') and data.get('event_color') not in ['#fff', '#1572E899', '#6861CE99', '#48ABF799',
  971. '#31CE3699', '#F2596199', '#FFAD4699']:
  972. data['event_color'] = ''
  973. if data.get('event_tags'):
  974. for tag in data.get('event_tags').split(','):
  975. if not isinstance(tag, str):
  976. raise marshmallow.exceptions.ValidationError("All items in list must be strings",
  977. field_name="event_tags")
  978. add_db_tag(tag.strip())
  979. return data
  980. @post_load
  981. def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  982. """Merges custom attributes with the event data.
  983. This method merges any custom attributes specified in the data with the event data. If no custom attributes are
  984. specified, it returns the original data.
  985. Args:
  986. data: The data to merge.
  987. kwargs: Additional keyword arguments.
  988. Returns:
  989. The merged data.
  990. """
  991. new_attr = data.get('custom_attributes')
  992. if new_attr is not None:
  993. assert_type_mml(input_var=data.get('event_id'),
  994. field_name='event_id',
  995. type=int,
  996. allow_none=True)
  997. data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('event_id'), 'event')
  998. return data
  999. class DSPathSchema(ma.SQLAlchemyAutoSchema):
  1000. """Schema for serializing and deserializing DataStorePath objects.
  1001. This schema defines the fields to include when serializing and deserializing DataStorePath objects.
  1002. It includes fields for the data store path ID, the data store ID, the path name, and the path description.
  1003. """
  1004. class Meta:
  1005. model = DataStorePath
  1006. load_instance = True
  1007. include_fk = True
  1008. unknown = EXCLUDE
class DSFileSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing DataStoreFile objects.

    Declares the original file name, description and raw content, and
    provides helpers that write uploaded or pasted file content into the
    datastore (optionally zip-encrypted with a password).
    """
    file_original_name: str = auto_field('file_original_name', required=True, validate=Length(min=1), allow_none=False)
    file_description: str = auto_field('file_description', allow_none=False)
    file_content: Optional[bytes] = fields.Raw(required=False)

    class Meta:
        model = DataStoreFile
        include_fk = True
        load_instance = True
        unknown = EXCLUDE  # silently drop unexpected input keys

    def ds_store_file_b64(self, filename: str, file_content: bytes, dsp: DataStorePath, cid: int) -> Tuple[
            DataStoreFile, bool]:
        """Stores pasted file content in the data store, deduplicated by hash.

        If a file with the same SHA-256 already exists, the existing record is
        returned; otherwise a new DataStoreFile row is created and the content
        is written to the standard datastore location.

        Args:
            filename: The name of the file (whitespace/control chars stripped).
            file_content: The raw bytes of the file.
            dsp: The data store path the file is attached to.
            cid: The ID of the case associated with the file.

        Returns:
            A tuple (DataStoreFile, exists) — exists is True when a file with
            the same hash was already stored.

        Raises:
            ValidationError: If anything fails while storing the file.
        """
        try:
            # Sanitize the name: strip trailing whitespace and control chars
            filename = filename.rstrip().replace('\t', '').replace('\n', '').replace('\r', '')
            file_hash = stream_sha256sum(file_content)

            # Deduplicate on content hash
            dsf = DataStoreFile.query.filter(DataStoreFile.file_sha256 == file_hash).first()
            if dsf:
                exists = True
            else:
                dsf = DataStoreFile()
                dsf.file_original_name = filename
                dsf.file_description = "Pasted in notes"
                dsf.file_tags = "notes"
                dsf.file_password = ""
                dsf.file_is_ioc = False
                dsf.file_is_evidence = False
                dsf.file_case_id = cid
                dsf.file_date_added = datetime.datetime.now()
                dsf.added_by_user_id = current_user.id
                # Placeholder path until the row has an ID to derive the
                # standard path from
                dsf.file_local_name = 'tmp_xc'
                dsf.file_parent_id = dsp.path_id
                dsf.file_sha256 = file_hash

                db.session.add(dsf)
                db.session.commit()

                dsf.file_local_name = datastore_get_standard_path(dsf, cid).as_posix()
                db.session.commit()

                with open(dsf.file_local_name, 'wb') as fout:
                    fout.write(file_content)

                exists = False

        except Exception as e:
            # NOTE(review): field_name 'file_password' looks odd here (no
            # password involved in this path) — confirm intended field
            raise marshmallow.exceptions.ValidationError(
                str(e),
                field_name='file_password'
            )

        setattr(self, 'file_local_path', str(dsf.file_local_name))

        return dsf, exists

    def ds_store_file(self, file_storage: FileStorage, location: Path, is_ioc: bool, password: Optional[str]) -> Tuple[
            str, int, str]:
        """Stores an uploaded file in the data store.

        If the file is an IOC and no password is given, the default password
        'infected' is used. When a password applies, the file is stored as a
        password-protected zip at <location>.zip; otherwise it is saved as-is.

        Args:
            file_storage: The uploaded file to store.
            location: The target path in the datastore.
            is_ioc: Whether the file is an IOC (forces encryption).
            password: Optional password used to encrypt the file.

        Returns:
            A tuple (file_path, file_size, file_hash) of the stored file.
            NOTE(review): returns bare None when file_storage has no
            filename, despite the declared tuple return — confirm callers
            handle this.

        Raises:
            ValidationError: If no file is provided or storing fails.
        """
        if file_storage is None:
            raise marshmallow.exceptions.ValidationError(
                "No file provided",
                field_name='file_content'
            )

        if not file_storage.filename:
            return None

        passwd = None

        try:
            # IOC files are always encrypted; default password if none given
            if is_ioc and not password:
                passwd = 'infected'
            elif password:
                passwd = password

            if passwd is not None:
                try:
                    # Spool the upload to a temp file, hash it, then compress
                    # it into a password-protected zip at <location>.zip
                    with tempfile.NamedTemporaryFile(delete=False) as tmp:
                        file_storage.save(tmp)
                        file_storage.close()
                        fn = tmp

                    file_hash = file_sha256sum(fn.name)
                    file_size = os.stat(fn.name).st_size
                    file_path = location.as_posix() + '.zip'

                    # Copy under the hash name so the zip entry is the hash
                    shutil.copyfile(fn.name, Path(fn.name).parent / file_hash)
                    pyminizip.compress((Path(fn.name).parent / file_hash).as_posix(), None, file_path, passwd, 0)

                    # Clean up both temp copies
                    os.unlink(Path(tmp.name).parent / file_hash)
                    os.unlink(fn.name)

                except Exception as e:
                    log.exception(e)
                    raise marshmallow.exceptions.ValidationError(
                        str(e),
                        field_name='file_password'
                    )
            else:
                # No password: save directly to the target location
                file_storage.save(location)
                file_storage.close()

                file_path = location.as_posix()
                file_size = location.stat().st_size
                file_hash = file_sha256sum(file_path)

        except Exception as e:
            raise marshmallow.exceptions.ValidationError(
                str(e),
                field_name='file_content'
            )

        if location is None:
            raise marshmallow.exceptions.ValidationError(
                "Unable to save file in target location",
                field_name='file_content'
            )

        setattr(self, 'file_local_path', str(location))

        return file_path, file_size, file_hash
class ServerSettingsSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing ServerSettings objects.

    Declares optional fields for the HTTP/HTTPS proxy URLs and the flag
    preventing post-modification repush.
    """
    http_proxy: Optional[str] = fields.String(required=False, allow_none=False)
    https_proxy: Optional[str] = fields.String(required=False, allow_none=False)
    prevent_post_mod_repush: Optional[bool] = fields.Boolean(required=False)

    class Meta:
        model = ServerSettings
        load_instance = True
        unknown = EXCLUDE  # silently drop unexpected input keys
class ContactSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing Contact objects.

    Exposes the contact name, email, work phone, mobile phone, role, note
    and owning client ID. Only the name (min length 2) and the client ID
    are mandatory; the other fields may be omitted but not explicitly null.
    """
    contact_name: str = auto_field('contact_name', required=True, validate=Length(min=2), allow_none=False)
    contact_email: Optional[str] = auto_field('contact_email', required=False, allow_none=False)
    contact_work_phone: Optional[str] = auto_field('contact_work_phone', required=False, allow_none=False)
    contact_mobile_phone: Optional[str] = auto_field('contact_mobile_phone', required=False, allow_none=False)
    contact_role: Optional[str] = auto_field('contact_role', required=False, allow_none=False)
    contact_note: Optional[str] = auto_field('contact_note', required=False, allow_none=False)
    client_id: int = auto_field('client_id', required=True)

    class Meta:
        model = Contact
        load_instance = True  # deserialize into Contact instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
  1165. class CaseClassificationSchema(ma.SQLAlchemyAutoSchema):
  1166. """Schema for serializing and deserializing CaseClassification objects.
  1167. This schema defines the fields to include when serializing and deserializing CaseClassification objects.
  1168. It includes fields for the classification name, expanded name, and description.
  1169. """
  1170. name: str = auto_field('name', required=True, validate=Length(min=2), allow_none=False)
  1171. name_expanded: str = auto_field('name_expanded', required=True, validate=Length(min=2), allow_none=False)
  1172. description: str = auto_field('description', required=True, validate=Length(min=2), allow_none=False)
  1173. class Meta:
  1174. model = CaseClassification
  1175. load_instance = True
  1176. unknown = EXCLUDE
  1177. @post_load
  1178. def verify_unique(self, data, **kwargs):
  1179. """Verifies that the classification name is unique.
  1180. This method verifies that the classification name is unique. If the name is not unique, it raises a validation error.
  1181. Args:
  1182. data: The data to load.
  1183. Returns:
  1184. The loaded data.
  1185. Raises:
  1186. ValidationError: If the classification name is not unique.
  1187. """
  1188. client = CaseClassification.query.filter(
  1189. func.lower(CaseClassification.name) == func.lower(data.name),
  1190. CaseClassification.id != data.id
  1191. ).first()
  1192. if client:
  1193. raise marshmallow.exceptions.ValidationError(
  1194. "Case classification name already exists",
  1195. field_name="name"
  1196. )
  1197. return data
  1198. class EvidenceTypeSchema(ma.SQLAlchemyAutoSchema):
  1199. """Schema for serializing and deserializing EvidenceType objects.
  1200. This schema defines the fields to include when serializing and deserializing EvidenceType objects.
  1201. It includes fields for the evidence type name, expanded name, and description.
  1202. """
  1203. name: str = auto_field('name', required=True, validate=Length(min=2), allow_none=False)
  1204. description: str = auto_field('description', required=True, allow_none=True)
  1205. class Meta:
  1206. model = EvidenceTypes
  1207. load_instance = True
  1208. unknown = EXCLUDE
  1209. @post_load
  1210. def verify_unique(self, data, **kwargs):
  1211. """Verifies that the evidence type name is unique.
  1212. This method verifies that the evidence type name is unique. If the name is not unique, it raises a validation error.
  1213. Args:
  1214. data: The data to load.
  1215. Returns:
  1216. The loaded data.
  1217. Raises:
  1218. ValidationError: If the evidence type name is not unique.
  1219. """
  1220. client = EvidenceTypes.query.filter(
  1221. func.lower(EvidenceTypes.name) == func.lower(data.name),
  1222. EvidenceTypes.id != data.id
  1223. ).first()
  1224. if client:
  1225. raise marshmallow.exceptions.ValidationError(
  1226. "Evidence type already exists",
  1227. field_name="name"
  1228. )
  1229. return data
  1230. class CaseSchema(ma.SQLAlchemyAutoSchema):
  1231. """Schema for serializing and deserializing Case objects.
  1232. This schema defines the fields to include when serializing and deserializing Case objects.
  1233. It includes fields for the case name, description, SOC ID, customer ID, organizations, protagonists, tags, CSRF token,
  1234. initial date, and classification ID.
  1235. """
  1236. case_name: str = auto_field('name', required=True, validate=Length(min=2), allow_none=False)
  1237. case_description: str = auto_field('description', required=True, validate=Length(min=2))
  1238. case_soc_id: int = auto_field('soc_id', required=True)
  1239. case_customer: int = auto_field('client_id', required=True)
  1240. case_organisations: List[int] = fields.List(fields.Integer, required=False)
  1241. protagonists: List[Dict[str, Any]] = fields.List(fields.Dict, required=False)
  1242. case_tags: Optional[str] = fields.String(required=False)
  1243. initial_date: Optional[datetime.datetime] = auto_field('initial_date', required=False)
  1244. classification_id: Optional[int] = auto_field('classification_id', required=False, allow_none=True)
  1245. reviewer_id: Optional[int] = auto_field('reviewer_id', required=False, allow_none=True)
  1246. review_status: Optional[str] = auto_field('review_status', required=False, allow_none=True)
  1247. severity_id: Optional[int] = auto_field('severity_id', required=False, allow_none=True)
  1248. class Meta:
  1249. model = Cases
  1250. include_fk = True
  1251. load_instance = True
  1252. exclude = ['name', 'description', 'soc_id', 'client_id', 'initial_date']
  1253. unknown = EXCLUDE
  1254. @pre_load
  1255. def classification_filter(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1256. """Filters out empty classification IDs.
  1257. This method filters out empty classification IDs from the data.
  1258. Args:
  1259. data: The data to load.
  1260. kwargs: Additional keyword arguments.
  1261. Returns:
  1262. The filtered data.
  1263. """
  1264. if data.get('classification_id') == "":
  1265. del data['classification_id']
  1266. return data
  1267. @pre_load
  1268. def verify_customer(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1269. """Verifies that the customer ID is valid.
  1270. This method verifies that the customer ID specified in the data is valid.
  1271. If the ID is not valid, it raises a validation error.
  1272. Args:
  1273. data: The data to load.
  1274. kwargs: Additional keyword arguments.
  1275. Returns:
  1276. The loaded data.
  1277. Raises:
  1278. ValidationError: If the customer ID is not valid.
  1279. """
  1280. assert_type_mml(input_var=data.get('case_customer'),
  1281. field_name='case_customer',
  1282. type=int,
  1283. allow_none=True)
  1284. client = Client.query.filter(Client.client_id == data.get('case_customer')).first()
  1285. if client:
  1286. return data
  1287. raise marshmallow.exceptions.ValidationError("Invalid client id",
  1288. field_name="case_customer")
  1289. @post_load
  1290. def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1291. """Merges custom attributes.
  1292. This method merges the custom attributes specified in the data with the existing custom attributes.
  1293. If there are no custom attributes specified, it returns the original data.
  1294. Args:
  1295. data: The data to load.
  1296. kwargs: Additional keyword arguments.
  1297. Returns:
  1298. The loaded data with merged custom attributes.
  1299. """
  1300. new_attr = data.get('custom_attributes')
  1301. assert_type_mml(input_var=new_attr,
  1302. field_name='custom_attributes',
  1303. type=dict,
  1304. allow_none=True)
  1305. assert_type_mml(input_var=data.get('case_id'),
  1306. field_name='case_id',
  1307. type=int,
  1308. allow_none=True)
  1309. if new_attr is not None:
  1310. data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('case_id'), 'case')
  1311. return data
class CaseStateSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing CaseState objects.

    Exposes the state ID, an optional associated case ID, the state name
    and the state description.
    """
    case_state_id: int = fields.Integer()
    case_id: int = fields.Integer()
    state_name: str = fields.String()
    state_description: str = fields.String()

    class Meta:
        model = CaseState
        load_instance = True  # deserialize into CaseState instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
  1325. class GlobalTasksSchema(ma.SQLAlchemyAutoSchema):
  1326. """Schema for serializing and deserializing GlobalTasks objects.
  1327. This schema defines the fields to include when serializing and deserializing GlobalTasks objects.
  1328. It includes fields for the task ID, assignee ID, task title, and CSRF token.
  1329. """
  1330. task_id: int = auto_field('id')
  1331. task_assignee_id: int = auto_field('task_assignee_id', required=True, allow_none=False)
  1332. task_title: str = auto_field('task_title', required=True, validate=Length(min=2), allow_none=False)
  1333. class Meta:
  1334. model = GlobalTasks
  1335. include_fk = True
  1336. load_instance = True
  1337. exclude = ['id']
  1338. unknown = EXCLUDE
  1339. @pre_load
  1340. def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1341. """Verifies that the assignee ID and task status ID are valid.
  1342. This method verifies that the assignee ID and task status ID specified in the data are valid.
  1343. If either ID is not valid, it raises a validation error.
  1344. Args:
  1345. data: The data to load.
  1346. kwargs: Additional keyword arguments.
  1347. Returns:
  1348. The loaded data.
  1349. Raises:
  1350. ValidationError: If the assignee ID or task status ID is not valid.
  1351. """
  1352. assert_type_mml(input_var=data.get('task_assignee_id'),
  1353. field_name='task_assignee_id',
  1354. type=int)
  1355. user = User.query.filter(User.id == data.get('task_assignee_id')).count()
  1356. if not user:
  1357. raise marshmallow.exceptions.ValidationError("Invalid user id for assignee",
  1358. field_name="task_assignees_id")
  1359. assert_type_mml(input_var=data.get('task_status_id'),
  1360. field_name='task_status_id',
  1361. type=int)
  1362. status = TaskStatus.query.filter(TaskStatus.id == data.get('task_status_id')).count()
  1363. if not status:
  1364. raise marshmallow.exceptions.ValidationError("Invalid task status ID",
  1365. field_name="task_status_id")
  1366. return data
  1367. class CustomerSchema(ma.SQLAlchemyAutoSchema):
  1368. """Schema for serializing and deserializing Customer objects.
  1369. This schema defines the fields to include when serializing and deserializing Customer objects.
  1370. It includes fields for the customer name, description, SLA, customer ID, and CSRF token.
  1371. """
  1372. customer_name: str = auto_field('name', required=True, validate=Length(min=2), allow_none=False)
  1373. customer_description: Optional[str] = auto_field('description', allow_none=True)
  1374. customer_sla: Optional[str] = auto_field('sla', allow_none=True)
  1375. customer_id: int = auto_field('client_id')
  1376. class Meta:
  1377. model = Client
  1378. load_instance = True
  1379. exclude = ['name', 'client_id', 'description', 'sla']
  1380. unknown = EXCLUDE
  1381. @post_load
  1382. def verify_unique(self, data: Client, **kwargs: Any) -> Client:
  1383. """Verifies that the customer name is unique.
  1384. This method verifies that the customer name is unique. If the name is not unique, it raises a validation error.
  1385. Args:
  1386. data: The data to load.
  1387. kwargs: Additional keyword arguments.
  1388. Returns:
  1389. The loaded data.
  1390. Raises:
  1391. ValidationError: If the customer name is not unique.
  1392. """
  1393. assert_type_mml(input_var=data.name,
  1394. field_name='customer_name',
  1395. type=str)
  1396. assert_type_mml(input_var=data.client_id,
  1397. field_name='customer_id',
  1398. type=int,
  1399. allow_none=True)
  1400. client = Client.query.filter(
  1401. func.upper(Client.name) == data.name.upper(),
  1402. Client.client_id != data.client_id
  1403. ).first()
  1404. if client:
  1405. raise marshmallow.exceptions.ValidationError(
  1406. "Customer already exists",
  1407. field_name="customer_name"
  1408. )
  1409. return data
  1410. @post_load
  1411. def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1412. """Merges custom attributes.
  1413. This method merges the custom attributes specified in the data with the existing custom attributes.
  1414. If there are no custom attributes specified, it returns the original data.
  1415. Args:
  1416. data: The data to load.
  1417. kwargs: Additional keyword arguments.
  1418. Returns:
  1419. The loaded data with merged custom attributes.
  1420. """
  1421. new_attr = data.get('custom_attributes')
  1422. assert_type_mml(input_var=new_attr,
  1423. field_name='custom_attributes',
  1424. type=dict,
  1425. allow_none=True)
  1426. if new_attr is not None:
  1427. assert_type_mml(input_var=data.get('client_id'),
  1428. field_name='customer_id',
  1429. type=int,
  1430. allow_none=True)
  1431. data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('client_id'), 'client')
  1432. return data
class TaskLogSchema(ma.Schema):
    """Schema for serializing and deserializing TaskLog payloads.

    Exposes an optional non-empty log content string.
    """
    log_content: Optional[str] = fields.String(required=False, validate=Length(min=1))

    class Meta:
        # NOTE(review): load_instance is a marshmallow-sqlalchemy option; on a
        # plain ma.Schema it presumably has no effect — confirm before relying on it.
        load_instance = True
        unknown = EXCLUDE  # ignore unknown keys instead of raising
class AnalysisStatusSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing AnalysisStatus objects.

    Fields are auto-generated from the AnalysisStatus SQLAlchemy model.
    """
    class Meta:
        model = AnalysisStatus
        load_instance = True  # deserialize into AnalysisStatus instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
class TaskStatusSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing TaskStatus objects.

    Fields are auto-generated from the TaskStatus SQLAlchemy model.
    """
    class Meta:
        model = TaskStatus
        load_instance = True  # deserialize into TaskStatus instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
  1460. class CaseTaskSchema(ma.SQLAlchemyAutoSchema):
  1461. """Schema for serializing and deserializing CaseTask objects.
  1462. This schema defines the fields to include when serializing and deserializing CaseTask objects.
  1463. It includes fields for the task title, task status ID, task assignees ID, task assignees, and CSRF token.
  1464. """
  1465. task_title: str = auto_field('task_title', required=True, validate=Length(min=2), allow_none=False)
  1466. task_status_id: int = auto_field('task_status_id', required=True)
  1467. task_assignees_id: Optional[List[int]] = fields.List(fields.Integer, required=False, allow_none=True)
  1468. task_assignees: Optional[List[Dict[str, Any]]] = fields.List(fields.Dict, required=False, allow_none=True)
  1469. status = ma.Nested(TaskStatusSchema)
  1470. case = ma.Nested(CaseSchema, only=['case_name', 'case_id'])
  1471. class Meta:
  1472. model = CaseTasks
  1473. load_instance = True
  1474. include_fk = True
  1475. unknown = EXCLUDE
  1476. @pre_load
  1477. def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1478. """Verifies that the task status ID is valid.
  1479. This method verifies that the task status ID specified in the data is valid.
  1480. If the ID is not valid, it raises a validation error.
  1481. Args:
  1482. data: The data to load.
  1483. kwargs: Additional keyword arguments.
  1484. Returns:
  1485. The loaded data.
  1486. Raises:
  1487. ValidationError: If the task status ID is not valid.
  1488. """
  1489. assert_type_mml(input_var=data.get('task_status_id'),
  1490. field_name='task_status_id',
  1491. type=int)
  1492. status = TaskStatus.query.filter(TaskStatus.id == data.get('task_status_id')).count()
  1493. if not status:
  1494. raise marshmallow.exceptions.ValidationError("Invalid task status ID",
  1495. field_name="task_status_id")
  1496. if data.get('task_tags'):
  1497. for tag in data.get('task_tags').split(','):
  1498. if not isinstance(tag, str):
  1499. raise marshmallow.exceptions.ValidationError("All items in list must be strings",
  1500. field_name="task_tags")
  1501. add_db_tag(tag.strip())
  1502. return data
  1503. @post_load
  1504. def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1505. """Merges custom attributes.
  1506. This method merges the custom attributes specified in the data with the existing custom attributes.
  1507. If there are no custom attributes specified, it returns the original data.
  1508. Args:
  1509. data: The data to load.
  1510. kwargs: Additional keyword arguments.
  1511. Returns:
  1512. The loaded data with merged custom attributes.
  1513. """
  1514. new_attr = data.get('custom_attributes')
  1515. assert_type_mml(input_var=new_attr,
  1516. field_name='custom_attributes',
  1517. type=dict,
  1518. allow_none=True)
  1519. assert_type_mml(input_var=data.get('id'),
  1520. field_name='task_id',
  1521. type=int,
  1522. allow_none=True)
  1523. if new_attr is not None:
  1524. data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('id'), 'task')
  1525. return data
  1526. class CaseEvidenceSchema(ma.SQLAlchemyAutoSchema):
  1527. """Schema for serializing and deserializing CaseEvidence objects.
  1528. This schema defines the fields to include when serializing and deserializing CaseEvidence objects.
  1529. It includes fields for the filename and CSRF token.
  1530. """
  1531. filename: str = auto_field('filename', required=True, validate=Length(min=2), allow_none=False)
  1532. type = ma.Nested(EvidenceTypeSchema)
  1533. user = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])
  1534. class Meta:
  1535. model = CaseReceivedFile
  1536. load_instance = True
  1537. include_relationships = True
  1538. include_fk = True
  1539. unknown = EXCLUDE
  1540. @post_load
  1541. def custom_attributes_merge(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1542. """Merges custom attributes.
  1543. This method merges the custom attributes specified in the data with the existing custom attributes.
  1544. If there are no custom attributes specified, it returns the original data.
  1545. Args:
  1546. data: The data to load.
  1547. kwargs: Additional keyword arguments.
  1548. Returns:
  1549. The loaded data with merged custom attributes.
  1550. """
  1551. new_attr = data.get('custom_attributes')
  1552. assert_type_mml(input_var=new_attr,
  1553. field_name='custom_attributes',
  1554. type=dict,
  1555. allow_none=True)
  1556. if new_attr is not None:
  1557. assert_type_mml(input_var=data.get('id'),
  1558. field_name='evidence_id',
  1559. type=int,
  1560. allow_none=True)
  1561. data['custom_attributes'] = merge_custom_attributes(new_attr, data.get('id'), 'evidence')
  1562. return data
  1563. class AuthorizationGroupSchema(ma.SQLAlchemyAutoSchema):
  1564. """Schema for serializing and deserializing AuthorizationGroup objects.
  1565. This schema defines the fields to include when serializing and deserializing AuthorizationGroup objects.
  1566. It includes fields for the group name, group description, group auto follow access level, and group permissions.
  1567. """
  1568. group_name: str = auto_field('group_name', required=True, validate=Length(min=2), allow_none=False)
  1569. group_description: str = auto_field('group_description', required=True, validate=Length(min=2))
  1570. group_auto_follow_access_level: Optional[bool] = auto_field('group_auto_follow_access_level', required=False,
  1571. default=False)
  1572. group_permissions: int = fields.Integer(required=False)
  1573. group_members: Optional[List[Dict[str, Any]]] = fields.List(fields.Dict, required=False, allow_none=True)
  1574. group_permissions_list: Optional[List[Dict[str, Any]]] = fields.List(fields.Dict, required=False, allow_none=True)
  1575. group_cases_access: Optional[List[Dict[str, Any]]] = fields.List(fields.Dict, required=False, allow_none=True)
  1576. class Meta:
  1577. model = Group
  1578. load_instance = True
  1579. include_fk = True
  1580. unknown = EXCLUDE
  1581. @pre_load
  1582. def verify_unique(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1583. """Verifies that the group name is unique.
  1584. This method verifies that the group name specified in the data is unique.
  1585. If the name is not unique, it raises a validation error.
  1586. Args:
  1587. data: The data to load.
  1588. kwargs: Additional keyword arguments.
  1589. Returns:
  1590. The loaded data.
  1591. Raises:
  1592. ValidationError: If the group name is not unique.
  1593. """
  1594. assert_type_mml(input_var=data.get('group_name'),
  1595. field_name='group_name',
  1596. type=str)
  1597. groups = Group.query.filter(
  1598. func.upper(Group.group_name) == data.get('group_name').upper()
  1599. ).all()
  1600. for group in groups:
  1601. if data.get('group_id') is None or group.group_id != data.get('group_id'):
  1602. raise marshmallow.exceptions.ValidationError(
  1603. "Group already exists",
  1604. field_name="group_name"
  1605. )
  1606. return data
  1607. @pre_load
  1608. def parse_permissions(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1609. """Parses the group permissions.
  1610. This method parses the group permissions specified in the data and converts them to an access control mask.
  1611. If no permissions are specified, it sets the mask to 0.
  1612. Args:
  1613. data: The data to load.
  1614. kwargs: Additional keyword arguments.
  1615. Returns:
  1616. The loaded data with the access control mask.
  1617. """
  1618. permissions = data.get('group_permissions')
  1619. if type(permissions) != list and not isinstance(permissions, type(None)):
  1620. permissions = [permissions]
  1621. if permissions is not None:
  1622. data['group_permissions'] = ac_mask_from_val_list(permissions)
  1623. else:
  1624. data['group_permissions'] = 0
  1625. return data
  1626. class AuthorizationOrganisationSchema(ma.SQLAlchemyAutoSchema):
  1627. """Schema for serializing and deserializing AuthorizationOrganisation objects.
  1628. This schema defines the fields to include when serializing and deserializing AuthorizationOrganisation objects.
  1629. It includes fields for the organization name and description.
  1630. """
  1631. org_name: str = auto_field('org_name', required=True, validate=Length(min=2), allow_none=False)
  1632. org_description: str = auto_field('org_description', required=True, validate=Length(min=2))
  1633. class Meta:
  1634. model = Organisation
  1635. load_instance = True
  1636. unknown = EXCLUDE
  1637. @pre_load
  1638. def verify_unique(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1639. """Verifies that the organization name is unique.
  1640. This method verifies that the organization name specified in the data is unique.
  1641. If the name is not unique, it raises a validation error.
  1642. Args:
  1643. data: The data to load.
  1644. kwargs: Additional keyword arguments.
  1645. Returns:
  1646. The loaded data.
  1647. Raises:
  1648. ValidationError: If the organization name is not unique.
  1649. """
  1650. assert_type_mml(input_var=data.get('org_name'),
  1651. field_name='org_name',
  1652. type=str)
  1653. organisations = Organisation.query.filter(
  1654. func.upper(Organisation.org_name) == data.get('org_name').upper()
  1655. ).all()
  1656. for organisation in organisations:
  1657. if data.get('org_id') is None or organisation.org_id != data.get('org_id'):
  1658. raise marshmallow.exceptions.ValidationError(
  1659. "Organisation name already exists",
  1660. field_name="org_name"
  1661. )
  1662. return data
class BasicUserSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing basic User objects.

    Remaps the User model columns to user_* field names and excludes both
    the raw column names and sensitive columns (password, api_key, ...).
    """
    user_id: Optional[int] = auto_field('id', required=False)
    user_uuid: Optional[str] = auto_field('uuid', required=False)
    user_name: str = auto_field('name', required=True, validate=Length(min=2))
    user_login: str = auto_field('user', required=True, validate=Length(min=2))
    user_email: str = auto_field('email', required=True, validate=Length(min=2))
    has_deletion_confirmation: Optional[bool] = auto_field('has_deletion_confirmation', required=False, default=False)

    class Meta:
        model = User
        load_instance = True  # deserialize into User instances
        # Exclude sensitive/internal columns and the raw names remapped above.
        exclude = ['password', 'api_key', 'ctx_case', 'ctx_human_case', 'active', 'external_id', 'in_dark_mode',
                   'id', 'name', 'email', 'user', 'uuid']
        unknown = EXCLUDE     # ignore unknown keys instead of raising
  1680. def validate_ioc_type(type_id: int) -> None:
  1681. """Validates the IOC type ID.
  1682. This function validates the IOC type ID by checking if it exists in the database.
  1683. If the ID is invalid, it raises a validation error.
  1684. Args:
  1685. type_id: The IOC type ID to validate.
  1686. Raises:
  1687. ValidationError: If the IOC type ID is invalid.
  1688. """
  1689. if not IocType.query.get(type_id):
  1690. raise ValidationError("Invalid ioc_type ID")
  1691. def validate_ioc_tlp(tlp_id: int) -> None:
  1692. """Validates the IOC TLP ID.
  1693. This function validates the IOC TLP ID by checking if it exists in the database.
  1694. If the ID is invalid, it raises a validation error.
  1695. Args:
  1696. tlp_id: The IOC TLP ID to validate.
  1697. Raises:
  1698. ValidationError: If the IOC TLP ID is invalid.
  1699. """
  1700. if not Tlp.query.get(tlp_id):
  1701. raise ValidationError("Invalid ioc_tlp ID")
  1702. def validate_asset_type(asset_id: int) -> None:
  1703. """Validates the asset type ID.
  1704. This function validates the asset type ID by checking if it exists in the database.
  1705. If the ID is invalid, it raises a validation error.
  1706. Args:
  1707. asset_id: The asset type ID to validate.
  1708. Raises:
  1709. ValidationError: If the asset type ID is invalid.
  1710. """
  1711. if not AssetsType.query.get(asset_id):
  1712. raise ValidationError("Invalid asset_type ID")
  1713. def validate_asset_tlp(tlp_id: int) -> None:
  1714. """Validates the asset TLP ID.
  1715. This function validates the asset TLP ID by checking if it exists in the database.
  1716. If the ID is invalid, it raises a validation error.
  1717. Args:
  1718. tlp_id: The asset TLP ID to validate.
  1719. Raises:
  1720. ValidationError: If the asset TLP ID is invalid.
  1721. """
  1722. if not Tlp.query.get(tlp_id):
  1723. raise ValidationError("Invalid asset_tlp ID")
class SeveritySchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing Severity objects.

    Fields are auto-generated from the Severity SQLAlchemy model.
    """
    class Meta:
        model = Severity
        load_instance = True  # deserialize into Severity instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
class AlertStatusSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing AlertStatus objects.

    Fields are auto-generated from the AlertStatus SQLAlchemy model.
    """
    class Meta:
        model = AlertStatus
        load_instance = True  # deserialize into AlertStatus instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
class AlertResolutionSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing AlertResolutionStatus objects.

    Fields are auto-generated from the AlertResolutionStatus SQLAlchemy model.
    (Docstring previously described AlertStatus — copy-paste artifact.)
    """
    class Meta:
        model = AlertResolutionStatus
        load_instance = True  # deserialize into AlertResolutionStatus instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
class EventCategorySchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing EventCategory objects.

    Fields are auto-generated from the EventCategory SQLAlchemy model.
    """
    class Meta:
        model = EventCategory
        load_instance = True  # deserialize into EventCategory instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
  1760. class AlertSchema(ma.SQLAlchemyAutoSchema):
  1761. """Schema for serializing and deserializing Alert objects.
  1762. This schema defines the fields to include when serializing and deserializing Alert objects.
  1763. It includes fields for the alert severity, status, customer, classification, owner, IOCs, and assets.
  1764. """
  1765. severity = ma.Nested(SeveritySchema)
  1766. status = ma.Nested(AlertStatusSchema)
  1767. customer = ma.Nested(CustomerSchema)
  1768. classification = ma.Nested(CaseClassificationSchema)
  1769. owner = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])
  1770. iocs = ma.Nested(IocSchema, many=True)
  1771. assets = ma.Nested(CaseAssetsSchema, many=True, exclude=['alerts'])
  1772. resolution_status = ma.Nested(AlertResolutionSchema)
  1773. class Meta:
  1774. model = Alert
  1775. include_relationships = True
  1776. include_fk = True
  1777. load_instance = True
  1778. unknown = EXCLUDE
  1779. @pre_load
  1780. def verify_data(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1781. """
  1782. Verify that the alert tags are valid and save them if they don't exist
  1783. """
  1784. if data.get('alert_tags'):
  1785. for tag in data.get('alert_tags').split(','):
  1786. if not isinstance(tag, str):
  1787. raise marshmallow.exceptions.ValidationError("All items in list must be strings",
  1788. field_name="alert_tags")
  1789. add_db_tag(tag.strip())
  1790. return data
class SavedFilterSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing SavedFilter objects.

    Fields are auto-generated from the SavedFilter SQLAlchemy model,
    including foreign keys and relationships.
    """
    class Meta:
        model = SavedFilter
        load_instance = True           # deserialize into SavedFilter instances
        include_fk = True              # expose foreign-key columns
        include_relationships = True   # expose relationship fields
        unknown = EXCLUDE              # ignore unknown keys instead of raising
class IrisModuleSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing IrisModule objects.

    Fields are auto-generated from the IrisModule SQLAlchemy model.
    """
    class Meta:
        model = IrisModule
        load_instance = True  # deserialize into IrisModule instances
        unknown = EXCLUDE     # ignore unknown keys instead of raising
class ModuleHooksSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing ModuleHooks objects.

    Fields are auto-generated from the IrisModuleHook SQLAlchemy model,
    including foreign keys and relationships.
    """
    class Meta:
        model = IrisModuleHook
        load_instance = True           # deserialize into IrisModuleHook instances
        include_fk = True              # expose foreign-key columns
        include_relationships = True   # expose relationship fields
        unknown = EXCLUDE              # ignore unknown keys instead of raising
  1816. class TagsSchema(ma.SQLAlchemyAutoSchema):
  1817. class Meta:
  1818. model = Tags
  1819. load_instance = True
  1820. include_fk = True
  1821. include_relationships = True
  1822. unknown = EXCLUDE
  1823. class ReviewStatusSchema(ma.SQLAlchemyAutoSchema):
  1824. class Meta:
  1825. model = ReviewStatus
  1826. load_instance = True
  1827. include_fk = True
  1828. include_relationships = True
  1829. unknown = EXCLUDE
  1830. class CaseProtagonistSchema(ma.SQLAlchemyAutoSchema):
  1831. """Schema for serializing and deserializing CaseProtagonist objects."""
  1832. class Meta:
  1833. model = CaseProtagonist
  1834. load_instance = True
  1835. include_fk = True
  1836. include_relationships = True
  1837. # This is the new schema for /api/v2/cases. It's in between CaseSchema and CaseDetailsSchema
  1838. # The goal was to have the same type for the cases returned in the following endpoints:
  1839. # * GET /api/v2/cases
  1840. # * POST /api/v2/cases
  1841. # * GET /api/v2/cases/{identifier}
  1842. # TODO The objective could then be to remove CaseSchema and CaseDetailsSchema
  1843. class CaseSchemaForAPIV2(ma.SQLAlchemyAutoSchema):
  1844. """Schema for serializing and deserializing Case objects.
  1845. This schema defines the fields to include when serializing and deserializing Case objects.
  1846. It includes fields for the case name, description, SOC ID, customer ID, organizations, protagonists, tags, CSRF token,
  1847. initial date, and classification ID.
  1848. """
  1849. case_name: str = auto_field('name', required=True, validate=Length(min=2), allow_none=False)
  1850. case_description: str = auto_field('description', required=True, validate=Length(min=2))
  1851. case_soc_id: int = auto_field('soc_id', required=True)
  1852. case_customer_id: int = auto_field('client_id', required=True)
  1853. case_organisations: List[int] = fields.List(fields.Integer, required=False)
  1854. protagonists: List[Dict[str, Any]] = fields.List(fields.Dict, required=False)
  1855. case_tags: Optional[str] = fields.String(required=False)
  1856. initial_date: Optional[datetime.datetime] = auto_field('initial_date', required=False)
  1857. classification_id: Optional[int] = auto_field('classification_id', required=False, allow_none=True)
  1858. reviewer_id: Optional[int] = auto_field('reviewer_id', required=False, allow_none=True)
  1859. owner = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])
  1860. state = ma.Nested(CaseStateSchema)
  1861. case_customer = ma.Nested(CustomerSchema)
  1862. review_status = ma.Nested(ReviewStatusSchema)
  1863. class Meta:
  1864. model = Cases
  1865. include_fk = True
  1866. load_instance = True
  1867. exclude = ['name', 'description', 'soc_id', 'client_id', 'initial_date', 'state_id', 'owner_id']
  1868. unknown = EXCLUDE
  1869. @pre_load
  1870. def classification_filter(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1871. """Filters out empty classification IDs.
  1872. This method filters out empty classification IDs from the data.
  1873. Args:
  1874. data: The data to load.
  1875. kwargs: Additional keyword arguments.
  1876. Returns:
  1877. The filtered data.
  1878. """
  1879. if data.get('classification_id') == "":
  1880. del data['classification_id']
  1881. return data
  1882. @pre_load
  1883. def verify_customer(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]:
  1884. """Verifies that the customer ID is valid.
  1885. This method verifies that the customer ID specified in the data is valid.
  1886. If the ID is not valid, it raises a validation error.
  1887. Args:
  1888. data: The data to load.
  1889. kwargs: Additional keyword arguments.
  1890. Returns:
  1891. The loaded data.
  1892. Raises:
  1893. ValidationError: If the customer ID is not valid.
  1894. """
  1895. assert_type_mml(input_var=data.get('case_customer'),
  1896. field_name='case_customer',
  1897. type=int,
  1898. allow_none=True)
  1899. client = Client.query.filter(Client.client_id == data.get('case_customer')).first()
  1900. if client:
  1901. return data
  1902. raise marshmallow.exceptions.ValidationError("Invalid client id",
  1903. field_name="case_customer")
class CaseDetailsSchema(ma.SQLAlchemyAutoSchema):
    """Schema for serializing and deserializing Case objects in details."""
    # Nested serializers for the case's related objects; user-typed fields
    # are trimmed to minimal identifying attributes.
    client = ma.Nested(CustomerSchema)
    owner = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])
    classification = ma.Nested(CaseClassificationSchema)
    state = ma.Nested(CaseStateSchema)
    tags = ma.Nested(TagsSchema, many=True, only=['tag_title', 'id'])
    user = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])
    reviewer = ma.Nested(UserSchema, only=['id', 'user_name', 'user_login', 'user_email'])
    review_status = ma.Nested(ReviewStatusSchema)
    severity = ma.Nested(SeveritySchema)

    def get_status_name(self, obj):
        """Resolve the case's numeric status_id to its CaseStatus enum name."""
        return CaseStatus(obj.status_id).name

    def get_protagonists(self, obj):
        """Query and serialize the protagonists attached to this case.

        Left-joins the optional linked User so protagonists without an
        account still appear.
        """
        # NOTE(review): User.name / User.user are aliased to user_name /
        # user_login — these look like the display-name and login columns
        # of the User model; confirm against the model definition.
        cp = CaseProtagonist.query.with_entities(
            CaseProtagonist.role,
            CaseProtagonist.name,
            CaseProtagonist.contact,
            User.name.label('user_name'),
            User.user.label('user_login')
        ).filter(
            CaseProtagonist.case_id == obj.case_id
        ).outerjoin(
            CaseProtagonist.user
        ).all()
        cp = CaseProtagonistSchema(many=True).dump(cp)
        return cp

    # Computed fields backed by the methods above
    status_name = ma.Method('get_status_name')
    protagonists = ma.Method('get_protagonists')

    class Meta:
        model = Cases
        include_fk = True
        load_instance = True
        include_relationships = True
        # Silently drop unknown payload keys on load instead of raising
        unknown = EXCLUDE