22
33import json
44from functools import cache
5+ from pathlib import Path
56from typing import TYPE_CHECKING , Any
67
78from dissect .database .ese .ntds import NTDS
89from dissect .database .ese .ntds .c_sd import c_sd
910from dissect .database .ese .ntds .util import UserAccountControl
1011
12+ from dissect .target .plugin import Plugin , UnsupportedPluginError , arg , export
13+
1114if TYPE_CHECKING :
12- from collections .abc import Callable , Iterator
13- from pathlib import Path
15+ from collections .abc import Iterator
1416
1517 from dissect .database .ese .ntds import NTDS
16- from dissect .database .ese .ntds .objects import Object , SecurityObject
1718 from dissect .database .ese .ntds .sd import SecurityDescriptor
1819 from flow .record import Record
1920
@@ -153,10 +154,10 @@ def extract_sd_data(ntds: NTDS, nt_security_descriptor: int | None) -> tuple[boo
153154 return c_sd .SECURITY_DESCRIPTOR_CONTROL .SE_DACL_PROTECTED .name in sd .header .Control .name .split ("|" ), aces
154155
155156
156- class BloodHoundExporter :
157- def __init__ (self , ntds : NTDS , output_dir : Path ) -> None :
158- self .ntds : NTDS = ntds
159- self . output_dir : Path = output_dir
157+ class BloodHound ( Plugin ) :
def check_compatible(self) -> None:
    """Declare plugin compatibility.

    Raises:
        UnsupportedPluginError: If the ``ad`` plugin is not available on the target.
    """
    if self.target.has_function("ad"):
        return
    raise UnsupportedPluginError("ad plugin is not initialized")
160161
161162 @staticmethod
162163 def extract_high_value (obj : Record ) -> str | None :
@@ -170,8 +171,31 @@ def extract_domain_id(obj: Record) -> str | None:
def extract_flag_from_enum(obj: Record, flag: UserAccountControl) -> bool:
    """Return ``True`` when *flag* is present in the record's ``user_account_control`` string.

    The record stores the UAC flags as a ``"|"``-separated string of flag names,
    so membership is tested on the exact split tokens, not on substrings.
    """
    set_flags = obj.user_account_control.split("|")
    return flag.name in set_flags
172173
173- def iterate_domains (self , domains : Callable [[], Iterator [Record ]]):
174- for domain in domains ():
def extract_generic_info(self, obj: Record) -> dict[str, Any]:
    """Assemble the top-level BloodHound fields shared by every exported AD object.

    NOTE(review): this reads ``self.ntds``, but nothing visible on this Plugin
    class sets an ``ntds`` attribute (the old ``__init__`` that stored it was
    removed) — confirm where the NTDS handle is supposed to come from.
    """
    is_acl_protected, aces = extract_sd_data(self.ntds, obj.nt_security_descriptor)

    # Only emit ContainedBy when both parent identifiers are present.
    has_parent = bool(obj.parent_guid and obj.parent_type)
    contained_by = (
        {"ObjectIdentifier": obj.parent_guid, "ObjectType": obj.parent_type} if has_parent else None
    )

    return {
        "ObjectIdentifier": obj.sid,
        "IsDeleted": obj.is_deleted.value,
        "IsACLProtected": is_acl_protected,
        "Aces": aces,
        "ContainedBy": contained_by,
    }
188+
def extract_generic_properties(self, obj: Record) -> dict[str, Any]:
    """Return the BloodHound ``Properties`` entries shared by all object types."""
    account_disabled = self.extract_flag_from_enum(obj, UserAccountControl.ACCOUNTDISABLE)
    # TODO: Make sure ``domain`` is robust because it's not from ntds.dit
    properties: dict[str, Any] = {
        "domain": obj.domain,
        "name": obj.name,
        "distinguishedname": obj.distinguished_name,
    }
    properties["enabled"] = not account_disabled
    return properties
196+
197+ def translate_domains (self ) -> Iterator [dict [str , Any ]]:
198+ for domain in self .target .ad .domains ():
175199 yield {
176200 "ObjectIdentifier" : domain .sid ,
177201 "Properties" : {
@@ -186,24 +210,17 @@ def iterate_domains(self, domains: Callable[[], Iterator[Record]]):
186210 "Links" : [],
187211 }
188212
189- def iterate_users (self , users : Callable [[], Iterator [ Record ]] ) -> Iterator [dict [str , Any ]]:
213+ def translate_users (self ) -> Iterator [dict [str , Any ]]:
190214 """Iterate over user records and yield BloodHound-formatted dictionaries."""
191- for user in users ():
192- is_acl_protected , aces = extract_sd_data (self .ntds , user .nt_security_descriptor )
215+ for user in self .target .ad .users ():
193216 yield {
194- "ObjectIdentifier" : user .sid ,
195- "IsDeleted" : user .is_deleted .value ,
196- "IsACLProtected" : is_acl_protected ,
217+ ** self .extract_generic_info (user ),
197218 "HasSIDHistory" : user .sid_history ,
198219 "SPNTargets" : user .service_principal_names , # TODO: Verify this is correct for SPN targeting in BloodHound
199220 "PrimaryGroupSID" : user .sid .replace (f"-{ user .rid } " , f"-{ user .primary_group_id } " ),
200221 "AllowedToDelegate" : user .allowed_to_delegate ,
201- "ContainedBy" : {"ObjectIdentifier" : user .parent_guid , "ObjectType" : user .parent_type },
202- "Aces" : aces ,
203222 "Properties" : {
204- "domain" : user .domain , # TODO: Make sure this is robust because it's not from ntds.dit
205- "name" : user .name ,
206- "distinguishedname" : user .distinguished_name ,
223+ ** self .extract_generic_properties (user ),
207224 "domainsid" : self .extract_domain_id (user ),
208225 "highvalue" : self .extract_high_value (user ),
209226 "samaccountname" : user .sam_name ,
@@ -215,7 +232,6 @@ def iterate_users(self, users: Callable[[], Iterator[Record]]) -> Iterator[dict[
215232 user , UserAccountControl .TRUSTED_FOR_DELEGATION
216233 ),
217234 "pwdneverexpires" : self .extract_flag_from_enum (user , UserAccountControl .DONT_EXPIRE_PASSWORD ),
218- "enabled" : not self .extract_flag_from_enum (user , UserAccountControl .ACCOUNTDISABLE ),
219235 "trustedtoauth" : bool (user .allowed_to_delegate )
220236 and self .extract_flag_from_enum (user , UserAccountControl .TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION ),
221237 "lastlogon" : user .logon_last_success_observed .isoformat ()
@@ -233,31 +249,27 @@ def iterate_users(self, users: Callable[[], Iterator[Record]]) -> Iterator[dict[
233249 "homedirectory" : user .home_directory ,
234250 "userpassword" : None ,
235251 "unixpassword" : None ,
236- "unicodepassword" : user .nt , # TODO: figure out lm hash goes here or not
252+ "unicodepassword" : user .nt , # TODO: Figure out lm hash goes here or not
237253 "sfupassword" : None ,
238254 "logonscript" : user .logon_script ,
239255 "admincount" : user .admin_count .value ,
240256 "sidhistory" : user .sid_history ,
241257 },
242258 }
243259
244- def iterate_computers (self , computers : Callable [[], Iterator [ Record ]]) :
245- for computer in computers ():
260+ def translate_computers (self ) -> Iterator [ dict [ str , Any ]] :
261+ for computer in self . target . ad . computers ():
246262 yield {
247- "ObjectIdentifier" : computer . sid ,
263+ ** self . extract_generic_info ( computer ) ,
248264 "Properties" : {
249- "domain" : self .extract_domain_id (computer ),
250- "name" : computer .dns_hostname ,
251- "distinguishedname" : computer .distinguished_name ,
265+ ** self .extract_generic_properties (computer ),
252266 "operatingsystem" : computer .operating_system ,
253- "enabled" : self .extract_flag_from_enum (computer ),
254267 },
255- "Aces" : extract_sd_data (self .ntds , computer .nt_security_descriptor ),
256268 "AllowedToDelegate" : computer .allowed_to_delegate ,
257269 }
258270
259- def iterate_groups (self , groups : Callable [[], Iterator [ Record ]]) :
260- for group in groups ():
271+ def translate_groups (self ) -> Iterator [ dict [ str , Any ]] :
272+ for group in self . target . ad . groups ():
261273 yield {
262274 "ObjectIdentifier" : group .sid ,
263275 "Properties" : {
@@ -269,8 +281,8 @@ def iterate_groups(self, groups: Callable[[], Iterator[Record]]):
269281 "Members" : group .members ,
270282 }
271283
272- def iterate_ous (self , ous : Callable [[], Iterator [ Record ]]) :
273- for ou in ous ():
284+ def translate_ous (self ) -> Iterator [ dict [ str , Any ]] :
285+ for ou in self . target . ad . ous ():
274286 yield {
275287 "ObjectIdentifier" : ou .sid ,
276288 "Properties" : {
@@ -283,8 +295,8 @@ def iterate_ous(self, ous: Callable[[], Iterator[Record]]):
283295 "Links" : [],
284296 }
285297
286- def iterate_gpos (self , group_policies : Callable [[], Iterator [ Record ]]) :
287- for gpo in group_policies ():
298+ def translate_gpos (self ) -> Iterator [ dict [ str , Any ]] :
299+ for gpo in self . target . ad . gpos ():
288300 yield {
289301 "ObjectIdentifier" : gpo .sid ,
290302 "Properties" : {
@@ -295,32 +307,38 @@ def iterate_gpos(self, group_policies: Callable[[], Iterator[Record]]):
295307 "Aces" : extract_sd_data (self .ntds , gpo .nt_security_descriptor ),
296308 }
297309
298- def write_bloodhound_json (self , records : Callable [[], Iterator [dict [str , Any ]]]) -> None :
@arg("-o", "--output", dest="output_dir", type=Path, required=True, help="Path to extract BloodHound files to")
@export(output="none")
def bloodhound(self, output_dir: Path) -> None:
    """Extract AD objects in BloodHound format and write them iteratively to disk.

    One ``<object_type>.json`` file per object type is written under *output_dir*.
    The JSON envelope is streamed by hand so records are serialized one at a
    time instead of materializing the whole array in memory; the ``meta``
    trailer (with the final count) is appended after the data array closes.
    """
    translators = {
        "users": self.translate_users,
        "computers": self.translate_computers,
        "domains": self.translate_domains,
        "groups": self.translate_groups,
        "ous": self.translate_ous,
        "gpos": self.translate_gpos,
    }

    output_dir.mkdir(parents=True, exist_ok=True)

    for object_type, translate in translators.items():
        output_path = output_dir.joinpath(object_type).with_suffix(".json")

        metadata = {"methods": 0, "type": object_type, "version": 6, "count": 0}

        with output_path.open("w", encoding="utf-8") as output_handle:
            output_handle.write('{\n\t"data": [\n\t\t')

            for index, item in enumerate(translate()):
                # Comma-separate every record after the first.
                if index:
                    output_handle.write(",\n\t\t")
                output_handle.write(json.dumps(item))
                metadata["count"] += 1

            output_handle.write('\n\t\t],\n\t"meta": ' + json.dumps(metadata) + "\n}\n")
0 commit comments