diff --git a/src/gcp_scanner/crawler/interface_crawler.py b/src/gcp_scanner/crawler/interface_crawler.py index 96b85524..b278bf9b 100644 --- a/src/gcp_scanner/crawler/interface_crawler.py +++ b/src/gcp_scanner/crawler/interface_crawler.py @@ -26,6 +26,12 @@ class ICrawler(metaclass=ABCMeta): """ + """Variable to identify if config file is needed + + Access Type: Private + """ + _config_dependency = False + @staticmethod @abstractmethod def crawl(project_name: str, service: discovery.Resource, @@ -49,3 +55,12 @@ def crawl(project_name: str, service: discovery.Resource, """ raise NotImplementedError("Child class must implement the crawl() method.") + + @property + def has_config_dependency(self) -> bool: + """Checks if the class needs a config file + + Returns: + bool: Returns config_dependency private variable which is False by default. + """ + return self._config_dependency \ No newline at end of file diff --git a/src/gcp_scanner/crawler/storage_buckets_crawler.py b/src/gcp_scanner/crawler/storage_buckets_crawler.py index 20425c6d..0ad2ec8d 100644 --- a/src/gcp_scanner/crawler/storage_buckets_crawler.py +++ b/src/gcp_scanner/crawler/storage_buckets_crawler.py @@ -24,6 +24,8 @@ class StorageBucketsCrawler(ICrawler): """Handle crawling of bucket names data.""" + _config_dependency = True  # Define that config file is needed + def crawl(self, project_name: str, service: discovery.Resource, config: Dict[str, Union[bool, str]] = None) -> Dict[str, Tuple[Any, List[Any]]]: """Retrieve a list of buckets available in the project. @@ -77,6 +79,15 @@ def crawl(self, project_name: str, service: discovery.Resource, if dump_fd is not None: dump_fd.close() return buckets_dict + + @property + def has_config_dependency(self) -> bool: + """Checks if the class needs a config file + + Returns: + bool: Returns the _config_dependency private variable, which is True for this crawler.
+ """ + return self._config_dependency @classmethod def _get_bucket_iam(cls, bucket_name: str, service: discovery.Resource) -> List[Any]: diff --git a/src/gcp_scanner/scanner.py b/src/gcp_scanner/scanner.py index f2280f05..7db10319 100644 --- a/src/gcp_scanner/scanner.py +++ b/src/gcp_scanner/scanner.py @@ -180,8 +180,10 @@ def get_crawl( Returns: scan_result: a dictionary with scanning results """ - - res = crawler.crawl(project_id, client, crawler_config) + if crawler.has_config_dependency: + res = crawler.crawl(project_id, client, crawler_config) + else: + res = crawler.crawl(project_id, client) if res is not None and len(res) != 0: scan_results[crawler_name] = res return scan_results