 import logging
 from glob import glob
 from datetime import datetime
-from tempfile import NamedTemporaryFile
+from tempfile import NamedTemporaryFile, gettempdir
 try:
     # python 2
     from urllib import quote as urlencode
 from insights.client.utilities import (
     generate_machine_id, write_data_to_file, get_time
 )
-from insights.util.subproc import call, CalledProcessError
+from insights.core.exceptions import CalledProcessError
+from insights.util.subproc import call

 logger = logging.getLogger(__name__)
 MIN_YARA_VERSION = "4.1.0"
@@ -186,6 +187,7 @@ def __init__(self, insights_config):
         self.add_metadata = self._get_config_option('add_metadata', False)

         self.matches = 0
+        self.potential_matches = 0

     def run(self):
         # Start the scans and record the time they were started
@@ -201,7 +203,11 @@ def run(self):

             # Write a message to user informing them if there were matches or not and what to do next
             if self.matches == 0:
-                logger.info("No rule matches found.\n")
+                if self.potential_matches == 0:
+                    logger.info("No rule matches found.\n")
+                else:
+                    logger.info("Rule matches potentially found but problems encountered parsing them, so no match data to upload.")
+                    logger.info("Please contact support.\n")
             else:
                 logger.info("Found %d rule match%s.", self.matches, 'es' if self.matches > 1 else '')
                 if not self.test_scan:
@@ -604,11 +610,12 @@ def _get_rules(self):
         # However it can happen that the rules file isn't removed for some reason, so remove any existing
         # rules files before beginning a new scan, otherwise they may show up as matches in the scan results.
         old_rules_files = sum([glob(os.path.join(path, rules))
-                               for path in ('/tmp', '/var/tmp')
+                               for path in ('/tmp', '/var/tmp', '/usr/tmp', gettempdir())
                                for rules in ('.tmpmdsigs*', 'tmp_malware-detection-client_rules.*')], [])
         for old_rules_file in old_rules_files:
-            logger.debug("Removing old rules file %s", old_rules_file)
-            os.remove(old_rules_file)
+            if os.path.exists(old_rules_file):
+                logger.debug("Removing old rules file %s", old_rules_file)
+                os.remove(old_rules_file)

         self.rules_location = self._get_config_option('rules_location', '')

@@ -741,8 +748,16 @@ def scan_filesystem(self):
             return False

         # Exclude the rules file and insights-client log files, unless they are things we specifically want to scan
-        if self.rules_file not in self.scan_fsobjects:
-            self.filesystem_scan_exclude_list.append(self.rules_file)
+        # Get a list of potential rules file locations, eg /tmp, /var/tmp, /usr/tmp and gettempdir()
+        # eg customers may have /tmp linked to /var/tmp so both must be checked for excluding the downloaded rules
+        rules_file_name = os.path.basename(self.rules_file)
+        potential_tmp_dirs = set([gettempdir(), '/tmp', '/var/tmp', '/usr/tmp'])
+        potential_rules_files = set(list(map(lambda d: os.path.join(d, rules_file_name), potential_tmp_dirs)) + [self.rules_file])
+        rules_files = list(filter(lambda f: os.path.isfile(f), potential_rules_files))
+        for rules_file in rules_files:
+            if rules_file not in self.scan_fsobjects:
+                self.filesystem_scan_exclude_list.append(rules_file)
+                logger.debug("Excluding rules file: %s", rules_file)
         insights_log_files = glob(constants.default_log_file + '*')
         self.filesystem_scan_exclude_list.extend(list(set(insights_log_files) - set(self.scan_fsobjects)))

@@ -795,7 +810,12 @@ def scan_filesystem(self):
                 logger.debug("Unable to scan %s: %s", toplevel_dir, cpe.output.strip())
                 continue

-            self.parse_scan_output(output.strip())
+            try:
+                self.parse_scan_output(output.strip())
+            except Exception as e:
+                self.potential_matches += 1
+                logger.exception("Rule match(es) potentially found in %s but problems encountered parsing the results: %s.  Skipping ...",
+                                 toplevel_dir, str(e))

             dir_scan_end = time.time()
             logger.info("Scan time for %s: %d seconds", toplevel_dir, (dir_scan_end - dir_scan_start))
@@ -862,7 +882,12 @@ def scan_processes(self):
                 logger.debug("Unable to scan process %s: %s", scan_pid, cpe.output.strip())
                 continue

-            self.parse_scan_output(output)
+            try:
+                self.parse_scan_output(output)
+            except Exception as e:
+                self.potential_matches += 1
+                logger.exception("Rule match(es) potentially found in process %s but problems encountered parsing the results: %s.  Skipping ...",
+                                 scan_pid, str(e))

             pid_scan_end = time.time()
             logger.info("Scan time for process %s: %d seconds", scan_pid, (pid_scan_end - pid_scan_start))
@@ -969,11 +994,15 @@ def skip_string_data_lines(string_data_lines):
                 rule_match['matches'] = [rule_match_dict]

             if self.add_metadata:
-                # Add extra data to each rule match, beyond what yara provides
-                # Eg, for files: line numbers & context, checksums; for processes: process name
-                # TODO: find more pythonic ways of doing this stuff instead of using system commands
-                metadata_func = self._add_file_metadata if source_type == 'file' else self._add_process_metadata
-                metadata_func(rule_match['matches'])
+                try:
+                    # Add extra data to each rule match, beyond what yara provides
+                    # Eg, for files: line numbers & context, checksums; for processes: process name
+                    # TODO: find more pythonic ways of doing this stuff instead of using system commands
+                    metadata_func = self._add_file_metadata if source_type == 'file' else self._add_process_metadata
+                    metadata_func(rule_match['matches'])
+                except Exception as e:
+                    logger.error("Error adding metadata to rule match %s in %s %s: %s.  Skipping ...",
+                                 rule_name, source_type, source, str(e))

             self.matches += 1
             logger.info("Matched rule %s in %s %s", rule_name, source_type, source)
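
As an aside, here is a minimal standalone sketch (not part of the patch) of how the new rules-file exclusion list is built: duplicate tmp directories collapse via the set (gettempdir() commonly returns '/tmp'), and only paths that actually exist on disk end up excluded. The rules file path used here is hypothetical.

import os
from tempfile import gettempdir

# Hypothetical downloaded rules file path, for illustration only
rules_file = '/var/tmp/tmp_malware-detection-client_rules.abc123'
rules_file_name = os.path.basename(rules_file)

# gettempdir() commonly returns '/tmp', so the set collapses that duplicate entry
potential_tmp_dirs = set([gettempdir(), '/tmp', '/var/tmp', '/usr/tmp'])
potential_rules_files = set([os.path.join(d, rules_file_name) for d in potential_tmp_dirs] + [rules_file])

# Only candidate paths that actually exist would be added to the scan exclude list
rules_files = [f for f in potential_rules_files if os.path.isfile(f)]
print(rules_files)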