diff --git a/domaintools_iris.json b/domaintools_iris.json
index 043fa8a..18498e8 100644
--- a/domaintools_iris.json
+++ b/domaintools_iris.json
@@ -2326,6 +2326,118 @@
                 }
             ],
             "versions": "EQ(*)"
+        },
+        {
+            "action": "domain discovery feed",
+            "description": "New domains as they are either discovered in domain registration information, observed by our global sensor network, or reported by trusted third parties.",
+            "type": "investigate",
+            "identifier": "domain_discovery_feed",
+            "read_only": true,
+            "parameters": {
+                "domain": {
+                    "description": "Used to filter feed results. The filter can be an exact match or a partial match when the * character is included at the beginning and/or end of the value.",
+                    "data_type": "string",
+                    "order": 0
+                },
+                "before": {
+                    "description": "The end of the query window in seconds or in ISO8601 format, relative to the current time, inclusive.",
+                    "data_type": "string",
+                    "order": 1
+                },
+                "after": {
+                    "description": "The start of the query window in seconds or in ISO8601 format, relative to the current time, inclusive.",
+                    "data_type": "string",
+                    "order": 2
+                },
+                "session_id": {
+                    "description": "Serves as a unique identifier for the session. This parameter ensures that data retrieval begins from the latest timestamp recorded in the previous data pull.",
+                    "data_type": "string",
+                    "order": 3
+                },
+                "top": {
+                    "description": "The number of results to return in the response payload. Primarily used for testing.",
+                    "data_type": "string",
+                    "order": 4
+                }
+            },
+            "render": {
+                "width": 12,
+                "title": "Domain Discovery List",
+                "type": "table",
+                "height": 10
+            },
+            "output": [
+                {
+                    "data_path": "action_result.data",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.data.*.domain",
+                    "data_type": "string",
+                    "column_name": "Domain Names",
+                    "column_order": 0,
+                    "contains": [
+                        "domain"
+                    ]
+                },
+                {
+                    "data_path": "action_result.data.*.timestamp",
+                    "data_type": "string",
+                    "column_name": "Time Stamp",
+                    "column_order": 1
+                },
+                {
+                    "data_path": "action_result.status",
+                    "data_type": "string",
+                    "example_values": [
+                        "success",
+                        "failed"
+                    ]
+                },
+                {
+                    "data_path": "action_result.summary",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.message",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.parameter.after",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.parameter.before",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.parameter.domain",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.parameter.session_id",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "action_result.parameter.top",
+                    "data_type": "string"
+                },
+                {
+                    "data_path": "summary.total_objects",
+                    "data_type": "numeric",
+                    "example_values": [
+                        1
+                    ]
+                },
+                {
+                    "data_path": "summary.total_objects_successful",
+                    "data_type": "numeric",
+                    "example_values": [
+                        1
+                    ]
+                }
+            ],
+            "versions": "EQ(*)"
         }
     ],
     "pip39_dependencies": {
diff --git a/domaintools_iris_connector.py b/domaintools_iris_connector.py
index fda6e4a..733bc11 100644
--- a/domaintools_iris_connector.py
+++ b/domaintools_iris_connector.py
@@ -33,8 +33,12 @@ class DomainToolsConnector(BaseConnector):
     ACTION_ID_LOAD_HASH = "load_hash"
     ACTION_ID_ON_POLL = "on_poll"
     ACTION_ID_CONFIGURE_SCHEDULED_PLAYBOOK = "configure_monitoring_scheduled_playbooks"
+
+    # RTUF action_ids
     ACTION_ID_NOD_FEED = "nod_feed"
     ACTION_ID_NAD_FEED = "nad_feed"
+    ACTION_ID_DOMAIN_DISCOVERY_FEED = "domain_discovery_feed"
+    RTUF_SERVICES_LIST = ["nod", "nad", "domaindiscovery"]

     def __init__(self):
         # Call the BaseConnectors init first
@@ -46,6 +50,22 @@ def __init__(self):
         self._domains = None
         self._proxy_url = None
         self._scheduled_playbooks_list_name = "domaintools_scheduled_playbooks"
+        self.ACTION_ID_TO_ACTION = {
+            phantom.ACTION_ID_TEST_ASSET_CONNECTIVITY: self._test_connectivity,
+            self.ACTION_ID_DOMAIN_REPUTATION: self._domain_reputation,
+            self.ACTION_ID_DOMAIN_ENRICH: self._domain_enrich,
+            self.ACTION_ID_DOMAIN_INVESTIGATE: self._domain_investigate,
+            self.ACTION_ID_PIVOT: self._pivot_action,
+            self.ACTION_ID_REVERSE_IP: self._reverse_lookup_ip,
+            self.ACTION_ID_REVERSE_EMAIL: self._reverse_whois_email,
+            self.ACTION_ID_REVERSE_DOMAIN: self._reverse_lookup_domain,
+            self.ACTION_ID_LOAD_HASH: self._load_hash,
+            self.ACTION_ID_ON_POLL: self._on_poll,
+            self.ACTION_ID_CONFIGURE_SCHEDULED_PLAYBOOK: self._configure_monitoring_scheduled_playbooks,
+            self.ACTION_ID_NOD_FEED: self._nod_feed,
+            self.ACTION_ID_NAD_FEED: self._nad_feed,
+            self.ACTION_ID_DOMAIN_DISCOVERY_FEED: self._domain_discovery_feed,
+        }

     def initialize(self):
         # get the app configuation - super class pulls domaintools_iris.json
@@ -67,9 +87,6 @@ def initialize(self):

         return phantom.APP_SUCCESS

-    def _is_feeds_service(self, service):
-        return service in ("nod", "nad")
-
     def _handle_py_ver_for_byte(self, input_str):
         """
         This method returns the binary|original string based on the Python version.
@@ -113,7 +130,7 @@ def _parse_feeds_response(self, service, action_result, feeds_results):
         rows = response.strip().split("\n")

         for row in rows:
-            if service in ("nod", "nad"):
+            if service in self.RTUF_SERVICES_LIST:
                 feed_result = json.loads(row)
                 data.append(
                     {
@@ -243,7 +260,7 @@ def _do_query(self, service, action_result, query_args=None):
             response = service_api(**query_args, position=position)

         try:
-            if self._is_feeds_service(service):
+            if service in self.RTUF_SERVICES_LIST:
                 # Separate parsing of feeds product
                 return self._parse_feeds_response(service, action_result, response)

@@ -323,8 +340,6 @@
         )

     def handle_action(self, param):
-        ret_val = phantom.APP_SUCCESS
-
         # Get the action that we are supposed to execute for this App Run
         action_id = self.get_action_identifier()

@@ -351,34 +366,15 @@ def handle_action(self, param):
             self._domains = self._get_domains(hostnames)

         # Handle the actions
-        if action_id == phantom.ACTION_ID_TEST_ASSET_CONNECTIVITY:
-            ret_val = self._test_connectivity()
-        elif action_id == self.ACTION_ID_DOMAIN_ENRICH:
-            ret_val = self._domain_enrich(param)
-        elif action_id == self.ACTION_ID_DOMAIN_INVESTIGATE:
-            ret_val = self._domain_investigate(param)
-        elif action_id == self.ACTION_ID_DOMAIN_REPUTATION:
-            ret_val = self._domain_reputation(param)
-        elif action_id == self.ACTION_ID_PIVOT:
-            ret_val = self._pivot_action(param)
-        elif action_id == self.ACTION_ID_REVERSE_IP:
-            ret_val = self._reverse_lookup_ip(param)
-        elif action_id == self.ACTION_ID_REVERSE_EMAIL:
-            ret_val = self._reverse_whois_email(param)
-        elif action_id == self.ACTION_ID_REVERSE_DOMAIN:
-            ret_val = self._reverse_lookup_domain(param)
-        elif action_id == self.ACTION_ID_LOAD_HASH:
-            ret_val = self._load_hash(param)
-        elif action_id == self.ACTION_ID_ON_POLL:
-            ret_val = self._on_poll(param)
-        elif action_id == self.ACTION_ID_CONFIGURE_SCHEDULED_PLAYBOOK:
-            ret_val = self._configure_monitoring_scheduled_playbooks(param)
-        elif action_id == self.ACTION_ID_NOD_FEED:
-            ret_val = self._nod_feed(param)
-        elif action_id == self.ACTION_ID_NAD_FEED:
-            ret_val = self._nad_feed(param)
-
-        return ret_val
+        action = self.ACTION_ID_TO_ACTION.get(action_id)
+        if action:
+            if action_id == phantom.ACTION_ID_TEST_ASSET_CONNECTIVITY:
+                # Special handling as this requires no param
+                return action()
+
+            return action(param)
+
+        return phantom.APP_SUCCESS

     def _get_proxy_url(self, config):
         proxy_url = None
@@ -904,6 +900,23 @@ def _nad_feed(self, param):

         return action_result.get_status()

+    def _domain_discovery_feed(self, param):
+        self.save_progress(f"Starting {self.ACTION_ID_DOMAIN_DISCOVERY_FEED} action.")
+        action_result = self.add_action_result(ActionResult(param))
+        params = {"always_sign_api_key": False}
+        params.update(param)
+        session_id = params.pop("session_id", None)
+        if session_id:
+            params["sessionID"] = session_id
+
+        ret_val = self._do_query("domaindiscovery", action_result, query_args=params)
+        self.save_progress(f"Completed {self.ACTION_ID_DOMAIN_DISCOVERY_FEED} action.")
+
+        if not ret_val:
+            return action_result.get_data()
+
+        return action_result.get_status()
+

 if __name__ == "__main__":
     import argparse