Init: mediaserver

2023-02-08 12:13:28 +01:00
parent 848bc9739c
commit f7c23d4ba9
31914 changed files with 6175775 additions and 0 deletions


@@ -0,0 +1,529 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for adaptive_response_notable_events
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import json
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
set_defaults,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_adaptive_response_notable_events import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
"""action module"""
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
self.api_object = (
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
)
self.module_name = "adaptive_response_notable_events"
self.key_transform = {
"action.notable.param.default_owner": "default_owner",
"action.notable.param.default_status": "default_status",
"action.notable.param.drilldown_name": "drilldown_name",
"action.notable.param.drilldown_search": "drilldown_search",
"action.notable.param.drilldown_earliest_offset": "drilldown_earliest_offset",
"action.notable.param.drilldown_latest_offset": "drilldown_latest_offset",
"action.notable.param.extract_artifacts": "extract_artifacts",
"action.notable.param.investigation_profiles": "investigation_profiles",
"action.notable.param.next_steps": "next_steps",
"action.notable.param.recommended_actions": "recommended_actions",
"action.notable.param.rule_description": "description",
"action.notable.param.rule_title": "name",
"action.notable.param.security_domain": "security_domain",
"action.notable.param.severity": "severity",
"name": "correlation_search_name",
}
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def fail_json(self, msg):
"""Replace the AnsibleModule fail_json here
:param msg: The message for the failure
:type msg: str
"""
msg = msg.replace("(basic.py)", self._task.action)
raise AnsibleActionFail(msg)
    # need to store 'recommended_actions', 'extract_artifacts', 'next_steps' and
    # 'investigation_profiles', since merging them in parsed form would eliminate
    # any differences
def save_params(self, want_conf):
param_store = {}
if "recommended_actions" in want_conf:
param_store["recommended_actions"] = want_conf[
"recommended_actions"
]
if "extract_artifacts" in want_conf:
param_store["extract_artifacts"] = want_conf["extract_artifacts"]
if "next_steps" in want_conf:
param_store["next_steps"] = want_conf["next_steps"]
if "investigation_profiles" in want_conf:
param_store["investigation_profiles"] = want_conf[
"investigation_profiles"
]
return param_store
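    # Illustrative example (not from the source): given
    #   want_conf = {"next_steps": ["ping"], "description": "x"}
    # save_params() would return {"next_steps": ["ping"]}, which is re-applied
    # after dict_merge so that list/dict-valued fields are compared verbatim.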
    # sets certain parameters depending on the state being triggered;
    # these parameters enable or disable the notable response action
def create_metadata(self, metadata, mode="add"):
if mode == "add":
if "actions" in metadata:
if metadata["actions"] == "notable":
pass
elif (
len(metadata["actions"].split(",")) > 0
and "notable" not in metadata["actions"]
):
metadata["actions"] = metadata["actions"] + ", notable"
else:
metadata["actions"] = "notable"
metadata["action.notable"] = "1"
elif mode == "delete":
if "actions" in metadata:
if metadata["actions"] == "notable":
metadata["actions"] = ""
elif (
len(metadata["actions"].split(",")) > 0
and "notable" in metadata["actions"]
):
tmp_list = metadata["actions"].split(",")
tmp_list.remove(" notable")
metadata["actions"] = ",".join(tmp_list)
metadata["action.notable"] = "0"
return metadata
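    # Illustrative behavior (hypothetical values): with metadata = {"actions": "email"},
    # mode="add" yields {"actions": "email, notable", "action.notable": "1"}; with
    # metadata = {"actions": "email, notable"}, mode="delete" yields
    # {"actions": "email", "action.notable": "0"}.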
def map_params_to_object(self, config):
res = {}
res["correlation_search_name"] = config["name"]
res.update(map_params_to_obj(config["content"], self.key_transform))
if "extract_artifacts" in res:
res["extract_artifacts"] = json.loads(res["extract_artifacts"])
if "investigation_profiles" in res:
if res["investigation_profiles"] == "{}":
res.pop("investigation_profiles")
else:
res["investigation_profiles"] = json.loads(
res["investigation_profiles"]
)
investigation_profiles = []
for keys in res["investigation_profiles"].keys():
investigation_profiles.append(keys.split("profile://")[1])
res["investigation_profiles"] = investigation_profiles
if "recommended_actions" in res:
res["recommended_actions"] = res["recommended_actions"].split(",")
if "next_steps" in res:
next_steps = json.loads(res["next_steps"])["data"]
next_steps = next_steps.split("]][[")
# trimming trailing characters
next_steps[0] = next_steps[0].strip("[")
next_steps[-1] = next_steps[-1].strip("]")
res["next_steps"] = []
for element in next_steps:
res["next_steps"].append(element.split("|")[1])
if "default_status" in res:
mapping = {
"0": "unassigned",
"1": "new",
"2": "in progress",
"3": "pending",
"4": "resolved",
"5": "closed",
}
res["default_status"] = mapping[res["default_status"]]
# need to store correlation search details for populating future request payloads
metadata = {}
metadata["search"] = config["content"]["search"]
metadata["actions"] = config["content"]["actions"]
return res, metadata
def map_objects_to_params(self, metadata, want_conf):
res = {}
res.update(map_obj_to_params(want_conf, self.key_transform))
res.update(self.create_metadata(metadata))
if "action.notable.param.extract_artifacts" in res:
res["action.notable.param.extract_artifacts"] = json.dumps(
res["action.notable.param.extract_artifacts"]
)
if "action.notable.param.recommended_actions" in res:
res["action.notable.param.recommended_actions"] = ",".join(
res["action.notable.param.recommended_actions"]
)
if "action.notable.param.investigation_profiles" in res:
investigation_profiles = {}
for element in res["action.notable.param.investigation_profiles"]:
investigation_profiles["profile://" + element] = {}
res["action.notable.param.investigation_profiles"] = json.dumps(
investigation_profiles
)
if "action.notable.param.next_steps" in res:
next_steps = ""
for next_step in res["action.notable.param.next_steps"]:
next_steps += "[[action|{0}]]".format(next_step)
            # NOTE: version:1 appears to be hard-coded when you create this via the Splunk web UI
next_steps_dict = {"version": 1, "data": next_steps}
res["action.notable.param.next_steps"] = json.dumps(
next_steps_dict
)
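        # Illustrative inverse (hypothetical steps): ["ping", "nslookup"] is
        # re-encoded as '{"version": 1, "data": "[[action|ping]][[action|nslookup]]"}'
        # before being sent back to Splunk.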
if "action.notable.param.default_status" in res:
mapping = {
"unassigned": "0",
"new": "1",
"in progress": "2",
"pending": "3",
"resolved": "4",
"closed": "5",
}
res["action.notable.param.default_status"] = mapping[
res["action.notable.param.default_status"]
]
# need to remove 'name', otherwise the API call will try to modify the correlation search
res.pop("name")
return res
def search_for_resource_name(self, conn_request, correlation_search_name):
query_dict = conn_request.get_by_path(
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
)
search_result = {}
if query_dict:
search_result, metadata = self.map_params_to_object(
query_dict["entry"][0]
)
else:
raise AnsibleActionFail(
"Correlation Search '{0}' doesn't exist".format(
correlation_search_name
)
)
return search_result, metadata
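    # Illustrative request (hypothetical search name): a correlation search named
    # "Test Search" would be fetched via a GET on
    # servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/Test%20Search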
    # Since there is no delete operation associated with an action,
    # the delete operation instead unsets the relevant fields
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
search_by_name, metadata = self.search_for_resource_name(
conn_request, want_conf["correlation_search_name"]
)
search_by_name = utils.remove_empties(search_by_name)
# Compare obtained values with a dict representing values in a 'deleted' state
diff_cmp = {
"correlation_search_name": want_conf[
"correlation_search_name"
],
"drilldown_earliest_offset": "$info_min_time$",
"drilldown_latest_offset": "$info_max_time$",
}
# if the obtained values are different from 'deleted' state values
if search_by_name and search_by_name != diff_cmp:
before.append(search_by_name)
payload = {
"action.notable.param.default_owner": "",
"action.notable.param.default_status": "",
"action.notable.param.drilldown_name": "",
"action.notable.param.drilldown_search": "",
"action.notable.param.drilldown_earliest_offset": "$info_min_time$",
"action.notable.param.drilldown_latest_offset": "$info_max_time$",
"action.notable.param.extract_artifacts": "{}",
"action.notable.param.investigation_profiles": "{}",
"action.notable.param.next_steps": "",
"action.notable.param.recommended_actions": "",
"action.notable.param.rule_description": "",
"action.notable.param.rule_title": "",
"action.notable.param.security_domain": "",
"action.notable.param.severity": "",
}
payload.update(self.create_metadata(metadata, mode="delete"))
url = "{0}/{1}".format(
self.api_object,
quote(want_conf["correlation_search_name"]),
)
conn_request.create_update(
url,
data=payload,
)
changed = True
after = []
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
def configure_module_api(self, conn_request, config):
before = []
after = []
changed = False
        # Add to this list any values that need to be excluded from the HAVE
        # params when compared to the WANT params, e.g. an 'ID' field that can
        # be part of the HAVE params but may not be part of your WANT params
defaults = {
"drilldown_earliest_offset": "$info_min_time$",
"drilldown_latest_offset": "$info_max_time$",
"extract_artifacts": {
"asset": [
"src",
"dest",
"dvc",
"orig_host",
],
"identity": [
"src_user",
"user",
"src_user_id",
"src_user_role",
"user_id",
"user_role",
"vendor_account",
],
},
"investigation_profiles": "{}",
}
remove_from_diff_compare = []
for want_conf in config:
have_conf, metadata = self.search_for_resource_name(
conn_request, want_conf["correlation_search_name"]
)
correlation_search_name = want_conf["correlation_search_name"]
if "notable" in metadata["actions"]:
want_conf = set_defaults(want_conf, defaults)
want_conf = utils.remove_empties(want_conf)
diff = utils.dict_diff(have_conf, want_conf)
# Check if have_conf has extra parameters
if self._task.args["state"] == "replaced":
diff2 = utils.dict_diff(want_conf, have_conf)
if len(diff) or len(diff2):
diff.update(diff2)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
# need to store 'recommended_actions','extract_artifacts'
# 'next_steps' and 'investigation_profiles'
# since merging in the parsed form will eliminate any differences
param_store = self.save_params(want_conf)
want_conf = utils.remove_empties(
utils.dict_merge(have_conf, want_conf)
)
want_conf = remove_get_keys_from_payload_dict(
want_conf, remove_from_diff_compare
)
# restoring parameters
want_conf.update(param_store)
changed = True
payload = self.map_objects_to_params(
metadata, want_conf
)
url = "{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json, metadata = self.map_params_to_object(
api_response["entry"][0]
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
self.delete_module_api_config(
conn_request=conn_request, config=[want_conf]
)
changed = True
payload = self.map_objects_to_params(
metadata, want_conf
)
url = "{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json, metadata = self.map_params_to_object(
api_response["entry"][0]
)
after.append(response_json)
else:
before.append(have_conf)
after.append(have_conf)
else:
changed = True
want_conf = utils.remove_empties(want_conf)
payload = self.map_objects_to_params(metadata, want_conf)
url = "{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json, metadata = self.map_params_to_object(
api_response["entry"][0]
)
after.extend(before)
after.append(response_json)
if not changed:
after = None
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
self._result[self.module_name] = {}
# config is retrieved as a string; need to deserialise
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
action_module=self,
connection=conn,
not_rest_data_keys=["state"],
)
if self._task.args["state"] == "gathered":
if config:
self._result["changed"] = False
self._result[self.module_name]["gathered"] = []
for item in config:
self._result[self.module_name]["gathered"].append(
self.search_for_resource_name(
conn_request, item["correlation_search_name"]
)[0]
)
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
(
self._result[self.module_name],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
elif self._task.args["state"] == "deleted":
(
self._result[self.module_name],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
return self._result


@@ -0,0 +1,435 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for splunk_correlation_searches
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import json
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
set_defaults,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
"""action module"""
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
self.api_object = (
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
)
self.module_name = "correlation_searches"
self.key_transform = {
"disabled": "disabled",
"name": "name",
"description": "description",
"search": "search",
"action.correlationsearch.annotations": "annotations",
"request.ui_dispatch_app": "ui_dispatch_context",
"dispatch.earliest_time": "time_earliest",
"dispatch.latest_time": "time_latest",
"cron_schedule": "cron_schedule",
"realtime_schedule": "scheduling",
"schedule_window": "schedule_window",
"schedule_priority": "schedule_priority",
"alert.digest_mode": "trigger_alert",
"alert_type": "trigger_alert_when",
"alert_comparator": "trigger_alert_when_condition",
"alert_threshold": "trigger_alert_when_value",
"alert.suppress": "suppress_alerts",
"alert.suppress.period": "throttle_window_duration",
"alert.suppress.fields": "throttle_fields_to_group_by",
}
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def fail_json(self, msg):
"""Replace the AnsibleModule fail_json here
:param msg: The message for the failure
:type msg: str
"""
msg = msg.replace("(basic.py)", self._task.action)
raise AnsibleActionFail(msg)
    # need to store 'annotations' and 'throttle_fields_to_group_by',
    # since merging them in parsed form would eliminate any differences.
    # These fields are converted from strings to lists/dictionaries,
    # and so they need to be compared in that form.
def save_params(self, want_conf):
param_store = {}
if "annotations" in want_conf:
param_store["annotations"] = want_conf["annotations"]
if "throttle_fields_to_group_by" in want_conf:
param_store["throttle_fields_to_group_by"] = want_conf[
"throttle_fields_to_group_by"
]
return param_store
def map_params_to_object(self, config):
res = {}
res["app"] = config["acl"]["app"]
res.update(map_params_to_obj(config["content"], self.key_transform))
res.update(map_params_to_obj(config, self.key_transform))
if "scheduling" in res:
if res["scheduling"]:
res["scheduling"] = "realtime"
else:
res["scheduling"] = "continuous"
if "trigger_alert" in res:
if res["trigger_alert"]:
res["trigger_alert"] = "once"
else:
res["trigger_alert"] = "for each result"
if "throttle_fields_to_group_by" in res:
res["throttle_fields_to_group_by"] = res[
"throttle_fields_to_group_by"
].split(",")
if "annotations" in res:
res["annotations"] = json.loads(res["annotations"])
custom = []
# need to check for custom annotation frameworks
for k, v in res["annotations"].items():
if k in {"cis20", "nist", "mitre_attack", "kill_chain_phases"}:
continue
entry = {}
entry["framework"] = k
entry["custom_annotations"] = v
custom.append(entry)
if custom:
for entry in custom:
res["annotations"].pop(entry["framework"])
res["annotations"]["custom"] = custom
return res
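    # Illustrative example (hypothetical framework name): an API value of
    # '{"mitre_attack": ["T1003"], "my_framework": ["f1"]}' is parsed into
    # {"mitre_attack": ["T1003"],
    #  "custom": [{"framework": "my_framework", "custom_annotations": ["f1"]}]}.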
def map_objects_to_params(self, want_conf):
res = {}
# setting parameters that enable correlation search
res["action.correlationsearch.enabled"] = "1"
res["is_scheduled"] = True
res["dispatch.rt_backfill"] = True
res["action.correlationsearch.label"] = want_conf["name"]
res.update(map_obj_to_params(want_conf, self.key_transform))
if "realtime_schedule" in res:
if res["realtime_schedule"] == "realtime":
res["realtime_schedule"] = True
else:
res["realtime_schedule"] = False
if "alert.digest_mode" in res:
if res["alert.digest_mode"] == "once":
res["alert.digest_mode"] = True
else:
res["alert.digest_mode"] = False
if "alert.suppress.fields" in res:
res["alert.suppress.fields"] = ",".join(
res["alert.suppress.fields"]
)
if (
"action.correlationsearch.annotations" in res
and "custom" in res["action.correlationsearch.annotations"]
):
for ele in res["action.correlationsearch.annotations"]["custom"]:
res["action.correlationsearch.annotations"][
ele["framework"]
] = ele["custom_annotations"]
res["action.correlationsearch.annotations"].pop("custom")
res["action.correlationsearch.annotations"] = json.dumps(
res["action.correlationsearch.annotations"]
)
return res
def search_for_resource_name(self, conn_request, correlation_search_name):
query_dict = conn_request.get_by_path(
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
)
search_result = {}
if query_dict:
search_result = self.map_params_to_object(query_dict["entry"][0])
return search_result
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
conn_request, want_conf["name"]
)
if search_by_name:
before.append(search_by_name)
url = "{0}/{1}".format(
self.api_object,
quote(want_conf["name"]),
)
conn_request.delete_by_path(
url,
)
changed = True
after = []
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
def configure_module_api(self, conn_request, config):
before = []
after = []
changed = False
        # Add to this list any values that need to be excluded from the HAVE
        # params when compared to the WANT params, e.g. an 'ID' field that can
        # be part of the HAVE params but may not be part of your WANT params
defaults = {}
remove_from_diff_compare = []
for want_conf in config:
have_conf = self.search_for_resource_name(
conn_request, want_conf["name"]
)
if have_conf:
want_conf = set_defaults(want_conf, defaults)
want_conf = utils.remove_empties(want_conf)
diff = utils.dict_diff(have_conf, want_conf)
# Check if have_conf has extra parameters
if self._task.args["state"] == "replaced":
diff2 = utils.dict_diff(want_conf, have_conf)
if len(diff) or len(diff2):
diff.update(diff2)
if diff:
name = want_conf["name"]
before.append(have_conf)
if self._task.args["state"] == "merged":
                        # need to store 'annotations' and 'throttle_fields_to_group_by'
                        # since merging in the parsed form will eliminate any differences
param_store = self.save_params(want_conf)
want_conf = utils.remove_empties(
utils.dict_merge(have_conf, want_conf)
)
want_conf = remove_get_keys_from_payload_dict(
want_conf, remove_from_diff_compare
)
# restoring parameters
want_conf.update(param_store)
changed = True
payload = self.map_objects_to_params(want_conf)
url = "{0}/{1}".format(
self.api_object,
quote(name),
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0]
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
self.delete_module_api_config(
conn_request=conn_request, config=[want_conf]
)
changed = True
payload = self.map_objects_to_params(want_conf)
url = "{0}/{1}".format(
self.api_object,
quote(name),
)
                        # when creating a new correlation search, this is how the 'app' field is set
if "app" in want_conf:
url = url.replace(
"SplunkEnterpriseSecuritySuite",
want_conf["app"],
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0]
)
after.append(response_json)
else:
before.append(have_conf)
after.append(have_conf)
else:
changed = True
want_conf = utils.remove_empties(want_conf)
name = want_conf["name"]
payload = self.map_objects_to_params(want_conf)
url = "{0}/{1}".format(
self.api_object,
quote(name),
)
                # when creating a new correlation search, this is how the 'app' field is set
if "app" in want_conf:
url = url.replace(
"SplunkEnterpriseSecuritySuite", want_conf["app"]
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0]
)
after.extend(before)
after.append(response_json)
if not changed:
after = None
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
self._result[self.module_name] = {}
# config is retrieved as a string; need to deserialise
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
action_module=self,
connection=conn,
not_rest_data_keys=["state"],
)
if self._task.args["state"] == "gathered":
if config:
self._result["changed"] = False
self._result["gathered"] = []
for item in config:
result = self.search_for_resource_name(
conn_request, item["name"]
)
if result:
self._result["gathered"].append(result)
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
(
self._result[self.module_name],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
elif self._task.args["state"] == "deleted":
(
self._result[self.module_name],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
return self._result


@@ -0,0 +1,313 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for data_inputs_monitor
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
set_defaults,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
"""action module"""
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
self.api_object = "servicesNS/nobody/search/data/inputs/monitor"
self.module_name = "data_inputs_monitor"
self.key_transform = {
"blacklist": "blacklist",
"check-index": "check_index", # not returned
"check-path": "check_path", # not returned
"crc-salt": "crc_salt",
"disabled": "disabled",
"followTail": "follow_tail",
"host": "host",
"host_regex": "host_regex",
"host_segment": "host_segment",
"ignore-older-than": "ignore_older_than", # not returned
"index": "index",
"name": "name",
"recursive": "recursive",
"rename-source": "rename_source", # not returned
"sourcetype": "sourcetype",
"time-before-close": "time_before_close", # not returned
"whitelist": "whitelist",
}
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def map_params_to_object(self, config):
res = {}
res["name"] = config["name"]
        # Splunk takes "crc-salt" as an input parameter but returns "crcSalt" in
        # the output, so the mapping can't be applied directly
if config["content"].get("crcSalt"):
config["content"]["crc-salt"] = config["content"]["crcSalt"]
res.update(map_params_to_obj(config["content"], self.key_transform))
return res
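    # Illustrative example (hypothetical value): a GET response containing
    # {"crcSalt": "<SOURCE>"} is first copied to "crc-salt" and then mapped to
    # the module key "crc_salt", matching what the module accepts on input.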
def search_for_resource_name(self, conn_request, directory_name):
query_dict = conn_request.get_by_path(
"{0}/{1}".format(self.api_object, quote_plus(directory_name))
)
search_result = {}
if query_dict:
search_result = self.map_params_to_object(query_dict["entry"][0])
return search_result
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
conn_request, want_conf["name"]
)
if search_by_name:
before.append(search_by_name)
conn_request.delete_by_path(
"{0}/{1}".format(
self.api_object, quote_plus(want_conf["name"])
)
)
changed = True
after = []
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
def configure_module_api(self, conn_request, config):
before = []
after = []
changed = False
        # Add to this list any values that need to be excluded from the HAVE
        # params when compared to the WANT params, e.g. an 'ID' field that can
        # be part of the HAVE params but may not be part of your WANT params
defaults = {
"disabled": False,
"host": "$decideOnStartup",
"index": "default",
}
remove_from_diff_compare = [
"check_path",
"check_index",
"ignore_older_than",
"time_before_close",
"rename_source",
]
for want_conf in config:
have_conf = self.search_for_resource_name(
conn_request, want_conf["name"]
)
if have_conf:
want_conf = set_defaults(want_conf, defaults)
want_conf = utils.remove_empties(want_conf)
diff = utils.dict_diff(have_conf, want_conf)
# Check if have_conf has extra parameters
if self._task.args["state"] == "replaced":
diff2 = utils.dict_diff(want_conf, have_conf)
if len(diff) or len(diff2):
diff.update(diff2)
if diff:
diff = remove_get_keys_from_payload_dict(
diff, remove_from_diff_compare
)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
want_conf = utils.remove_empties(
utils.dict_merge(have_conf, want_conf)
)
want_conf = remove_get_keys_from_payload_dict(
want_conf, remove_from_diff_compare
)
changed = True
payload = map_obj_to_params(
want_conf, self.key_transform
)
url = "{0}/{1}".format(
self.api_object,
quote_plus(payload.pop("name")),
)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0]
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
conn_request.delete_by_path(
"{0}/{1}".format(
self.api_object,
quote_plus(want_conf["name"]),
)
)
changed = True
payload = map_obj_to_params(
want_conf, self.key_transform
)
url = "{0}".format(self.api_object)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0]
)
after.append(response_json)
else:
before.append(have_conf)
after.append(have_conf)
else:
before.append(have_conf)
after.append(have_conf)
else:
changed = True
want_conf = utils.remove_empties(want_conf)
payload = map_obj_to_params(want_conf, self.key_transform)
url = "{0}".format(self.api_object)
api_response = conn_request.create_update(
url,
data=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0]
)
after.extend(before)
after.append(response_json)
if not changed:
after = None
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
# self._result[self.module_name] = {}
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
action_module=self,
connection=conn,
not_rest_data_keys=["state"],
)
if self._task.args["state"] == "gathered":
if config:
self._result["gathered"] = []
self._result["changed"] = False
for item in config:
result = self.search_for_resource_name(
conn_request, item["name"]
)
if result:
self._result["gathered"].append(result)
else:
self._result["gathered"] = conn_request.get_by_path(
self.api_object
)["entry"]
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
(
self._result[self.module_name],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
elif self._task.args["state"] == "deleted":
(
self._result[self.module_name],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
return self._result


@@ -0,0 +1,538 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for data_inputs_network
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
"""action module"""
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
self.api_object = "servicesNS/nobody/search/data/inputs"
self.module_return = "data_inputs_network"
self.key_transform = {
"name": "name",
"connection_host": "connection_host",
"disabled": "disabled",
"index": "index",
"host": "host",
"no_appending_timestamp": "no_appending_timestamp",
"no_priority_stripping": "no_priority_stripping",
"rawTcpDoneTimeout": "raw_tcp_done_timeout",
"restrictToHost": "restrict_to_host",
"queue": "queue",
"SSL": "ssl",
"source": "source",
"sourcetype": "sourcetype",
"token": "token",
"password": "password",
"requireClientCert": "require_client_cert",
"rootCA": "root_ca",
"serverCert": "server_cert",
"cipherSuite": "cipher_suite",
}
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def fail_json(self, msg):
"""Replace the AnsibleModule fail_json here
:param msg: The message for the failure
:type msg: str
"""
msg = msg.replace("(basic.py)", self._task.action)
raise AnsibleActionFail(msg)
def map_params_to_object(self, config, datatype=None):
res = {}
res["name"] = config["name"]
res.update(map_params_to_obj(config["content"], self.key_transform))
        # the API returns "index" even though it can't be set within /tcp/cooked
if datatype:
if datatype == "cooked" and "index" in res:
res.pop("index")
elif datatype == "splunktcptoken":
if "index" in res:
res.pop("index")
if "host" in res:
res.pop("host")
if "disabled" in res:
res.pop("disabled")
return res
    # This function constructs the URL and handles GET, POST and DELETE calls
    # depending on the context. The URLs constructed and handled are:
# /tcp/raw[/{name}]
# /tcp/cooked[/{name}]
# /tcp/splunktcptoken[/{name}]
# /tcp/ssl[/{name}]
# /udp[/{name}]
def request_by_path(
self,
conn_request,
protocol,
datatype=None,
name=None,
req_type="get",
payload=None,
):
query_dict = None
url = ""
if protocol == "tcp":
if not datatype:
raise AnsibleActionFail("No datatype specified for TCP input")
# In all cases except "ssl" datatype, creation of objects is handled
# by a POST request to the parent directory. Therefore name shouldn't
# be included in the URL.
if not name or (req_type == "post_create" and datatype != "ssl"):
name = ""
url = "{0}/{1}/{2}/{3}".format(
self.api_object,
protocol,
datatype,
quote_plus(str(name)),
)
# if no "name" was provided
if url[-1] == "/":
url = url[:-1]
elif protocol == "udp":
if datatype:
raise AnsibleActionFail("Datatype specified for UDP input")
if not name or req_type == "post_create":
name = ""
url = "{0}/{1}/{2}".format(
self.api_object,
protocol,
quote_plus(str(name)),
)
# if no "name" was provided
if url[-1] == "/":
url = url[:-1]
else:
raise AnsibleActionFail(
"Incompatible protocol specified. Please specify 'tcp' or 'udp'"
)
if req_type == "get":
query_dict = conn_request.get_by_path(url)
elif req_type == "post_create":
query_dict = conn_request.create_update(url, data=payload)
elif req_type == "post_update":
payload.pop("name")
query_dict = conn_request.create_update(url, data=payload)
elif req_type == "delete":
query_dict = conn_request.delete_by_path(url)
return query_dict
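    # Illustrative URLs (hypothetical names): ("tcp", "raw", "8099") maps to
    # servicesNS/nobody/search/data/inputs/tcp/raw/8099; a "post_create" for a
    # non-ssl TCP datatype posts to the parent, e.g. .../tcp/raw; and
    # ("udp", None, "514") maps to .../udp/514.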
def search_for_resource_name(self, conn_request, protocol, datatype, name):
query_dict = self.request_by_path(
conn_request,
protocol,
datatype,
name,
)
search_result = {}
if query_dict:
search_result = self.map_params_to_object(
query_dict["entry"][0], datatype
)
# Adding back protocol and datatype fields for better clarity
search_result["protocol"] = protocol
if datatype:
search_result["datatype"] = datatype
if datatype == "ssl":
search_result["name"] = name
return search_result
    # If certain parameters are present, Splunk appends their values to the
    # name, which breaks idempotency. This function looks for those parameters
    # and checks whether the configuration already exists.
def parse_config(self, conn_request, want_conf):
old_name = None
protocol = want_conf["protocol"]
datatype = want_conf.get("datatype")
if not want_conf.get("name"):
raise AnsibleActionFail("No name specified for merge action")
else:
# Int values confuse diff
want_conf["name"] = str(want_conf["name"])
old_name = want_conf["name"]
if (
want_conf.get("restrict_to_host")
and old_name.split(":")[0] == want_conf["restrict_to_host"]
):
old_name = old_name.split(":")[1]
# If "restrictToHost" parameter is set, the value of this parameter is appended
# to the numerical name meant to represent port number
if (
want_conf.get("restrict_to_host")
and want_conf["restrict_to_host"] not in want_conf["name"]
):
want_conf["name"] = "{0}:{1}".format(
want_conf["restrict_to_host"], want_conf["name"]
)
# If datatype is "splunktcptoken", the value "splunktcptoken://" is appended
# to the name
elif (
datatype
and datatype == "splunktcptoken"
and "splunktcptoken://" not in want_conf["name"]
):
want_conf["name"] = "{0}{1}".format(
"splunktcptoken://", want_conf["name"]
)
name = want_conf["name"]
        # If the above parameters are present but the object doesn't exist,
        # their values shouldn't be prepended to the name; otherwise Splunk
        # returns a 400. This check takes advantage of that behavior to set
        # the correct name.
have_conf = None
try:
have_conf = self.search_for_resource_name(
conn_request,
protocol,
datatype,
name,
)
            # when creating a new conf, only the numerical value should be used;
            # Splunk will later prepend the param value to it
if not have_conf:
want_conf["name"] = old_name
except AnsibleActionFail:
want_conf["name"] = old_name
have_conf = self.search_for_resource_name(
conn_request,
protocol,
datatype,
old_name,
)
# SSL response returns a blank "name" parameter, which causes problems
if datatype == "ssl":
have_conf["name"] = want_conf["name"]
return have_conf, protocol, datatype, name, old_name
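    # Illustrative example (hypothetical values): with restrict_to_host="10.0.0.1"
    # and name="8100", the existing object is looked up as "10.0.0.1:8100"; if it
    # doesn't exist yet, the name reverts to "8100" so that Splunk itself prepends
    # the host on creation.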
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
if not want_conf.get("name"):
raise AnsibleActionFail("No name specified")
have_conf, protocol, datatype, name, _old_name = self.parse_config(
conn_request, want_conf
)
if protocol == "tcp" and datatype == "ssl":
raise AnsibleActionFail("Deleted state not supported for SSL")
if have_conf:
before.append(have_conf)
self.request_by_path(
conn_request,
protocol,
datatype,
name,
req_type="delete",
)
changed = True
after = []
ret_config = {}
ret_config["before"] = before
ret_config["after"] = after
return ret_config, changed
def configure_module_api(self, conn_request, config):
before = []
after = []
changed = False
for want_conf in config:
            # Add to this list any values that need to be excluded from the HAVE
            # params when compared to the WANT params, e.g. an 'ID' field that can
            # be part of the HAVE params but may not be part of your WANT params
remove_from_diff_compare = [
"datatype",
"protocol",
"cipher_suite",
]
have_conf, protocol, datatype, name, old_name = self.parse_config(
conn_request, want_conf
)
if (
protocol == "tcp"
and datatype == "ssl"
and self._task.args["state"] == "replaced"
):
raise AnsibleActionFail("Replaced state not supported for SSL")
if have_conf:
want_conf = utils.remove_empties(want_conf)
diff = utils.dict_diff(have_conf, want_conf)
# Check if have_conf has extra parameters
if self._task.args["state"] == "replaced":
diff2 = utils.dict_diff(want_conf, have_conf)
if len(diff) or len(diff2):
diff.update(diff2)
if diff:
diff = remove_get_keys_from_payload_dict(
diff, remove_from_diff_compare
)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
want_conf = utils.remove_empties(
utils.dict_merge(have_conf, want_conf)
)
want_conf = remove_get_keys_from_payload_dict(
want_conf, remove_from_diff_compare
)
changed = True
payload = map_obj_to_params(
want_conf, self.key_transform
)
api_response = self.request_by_path(
conn_request,
protocol,
datatype,
name,
req_type="post_update",
payload=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0], datatype
)
# Adding back protocol and datatype fields for better clarity
response_json["protocol"] = protocol
if datatype:
response_json["datatype"] = datatype
after.append(response_json)
elif self._task.args["state"] == "replaced":
api_response = self.request_by_path(
conn_request,
protocol,
datatype,
name,
req_type="delete",
)
changed = True
payload = map_obj_to_params(
want_conf, self.key_transform
)
                        # when creating a new conf, only the numerical value should be used;
                        # Splunk will later prepend the param value to it
payload["name"] = old_name
api_response = self.request_by_path(
conn_request,
protocol,
datatype,
name,
req_type="post_create",
payload=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0], datatype
)
# Adding back protocol and datatype fields for better clarity
response_json["protocol"] = protocol
if datatype:
response_json["datatype"] = datatype
after.append(response_json)
else:
before.append(have_conf)
after.append(have_conf)
else:
before.append(have_conf)
after.append(have_conf)
else:
changed = True
want_conf = utils.remove_empties(want_conf)
payload = map_obj_to_params(want_conf, self.key_transform)
api_response = self.request_by_path(
conn_request,
protocol,
datatype,
name,
req_type="post_create",
payload=payload,
)
response_json = self.map_params_to_object(
api_response["entry"][0], datatype
)
# Adding back protocol and datatype fields for better clarity
response_json["protocol"] = protocol
if datatype:
response_json["datatype"] = datatype
after.extend(before)
after.append(response_json)
if not changed:
after = None
ret_config = {}
ret_config["before"] = before
ret_config["after"] = after
return ret_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
connection=conn,
action_module=self,
)
if self._task.args["state"] == "gathered":
if config:
self._result["gathered"] = []
self._result["changed"] = False
for item in config:
if item.get("name"):
result = self.search_for_resource_name(
conn_request,
item["protocol"],
item.get("datatype"),
item.get("name"),
)
if result:
self._result["gathered"].append(result)
else:
response_list = self.request_by_path(
conn_request,
item["protocol"],
item.get("datatype"),
None,
)["entry"]
self._result["gathered"] = []
for response_dict in response_list:
self._result["gathered"].append(
self.map_params_to_object(response_dict),
)
else:
raise AnsibleActionFail("No protocol specified")
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
if config:
(
self._result[self.module_return],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if not self._result[self.module_return]["after"]:
self._result[self.module_return].pop("after")
elif self._task.args["state"] == "deleted":
if config:
(
self._result[self.module_return],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_return]["after"] is None:
self._result[self.module_return].pop("after")
return self._result


@@ -0,0 +1,77 @@
# (c) 2019 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
author: Ansible Security Team (@ansible-security)
name: splunk
short_description: HttpApi Plugin for Splunk
description:
  - This HttpApi plugin provides methods to connect to Splunk over an
    HTTP(S)-based API.
version_added: "1.0.0"
"""
import json
from ansible.module_utils.basic import to_text
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.ansible.netcommon.plugins.plugin_utils.httpapi_base import (
HttpApiBase,
)
from ansible.module_utils.connection import ConnectionError
BASE_HEADERS = {"Content-Type": "application/json"}
class HttpApi(HttpApiBase):
def send_request(self, request_method, path, payload=None):
# payload = json.dumps(payload) if payload else '{}'
try:
self._display_request(request_method, path)
response, response_data = self.connection.send(
path,
payload,
method=request_method,
headers=BASE_HEADERS,
force_basic_auth=True,
)
value = self._get_response_value(response_data)
return response.getcode(), self._response_to_json(value)
except AnsibleConnectionFailure as e:
self.connection.queue_message(
"vvv", "AnsibleConnectionFailure: %s" % e
)
if to_text("Could not connect to") in to_text(e):
raise
if to_text("401") in to_text(e):
return 401, "Authentication failure"
else:
return 404, "Object not found"
except HTTPError as e:
error = json.loads(e.read())
return e.code, error
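    # Illustrative usage (hypothetical path): send_request("GET",
    # "/services/authentication/users?output_mode=json") returns a
    # (status, parsed-JSON) tuple such as (200, {...}).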
def _display_request(self, request_method, path):
self.connection.queue_message(
"vvvv",
"Web Services: %s %s/%s"
% (request_method, self.connection._url, path),
)
def _get_response_value(self, response_data):
return to_text(response_data.getvalue())
def _response_to_json(self, response_text):
try:
return json.loads(response_text) if response_text else {}
# JSONDecodeError only available on Python 3.5+
except ValueError:
raise ConnectionError("Invalid JSON response: %s" % response_text)


@@ -0,0 +1,256 @@
# -*- coding: utf-8 -*-
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.urls import CertificateError
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.connection import (
ConnectionError,
Connection,
)
from ansible.module_utils._text import to_text
from ansible.module_utils.six import iteritems
def parse_splunk_args(module):
"""
Get the valid fields that should be passed to the REST API as urlencoded
data so long as the argument specification to the module follows the
convention:
1) name field is Required to be passed as data to REST API
2) all module argspec items that should be passed to data are not
Required by the module and are set to default=None
"""
try:
splunk_data = {}
for argspec in module.argument_spec:
if (
"default" in module.argument_spec[argspec]
and module.argument_spec[argspec]["default"] is None
and module.params[argspec] is not None
):
splunk_data[argspec] = module.params[argspec]
return splunk_data
except TypeError as e:
module.fail_json(
msg="Invalid data type provided for splunk module_util.parse_splunk_args: {0}".format(
e
)
)
def remove_get_keys_from_payload_dict(payload_dict, remove_key_list):
for each_key in remove_key_list:
if each_key in payload_dict:
payload_dict.pop(each_key)
return payload_dict
def map_params_to_obj(module_params, key_transform):
"""The fn to convert the api returned params to module params
:param module_params: Module params
:param key_transform: Dict with module equivalent API params
:rtype: A dict
:returns: dict with module prams transformed having API expected params
"""
obj = {}
for k, v in iteritems(key_transform):
if k in module_params and (
module_params.get(k)
or module_params.get(k) == 0
or module_params.get(k) is False
):
obj[v] = module_params.pop(k)
return obj
def map_obj_to_params(module_return_params, key_transform):
"""The fn to convert the module params to api return params
:param module_return_params: API returned response params
:param key_transform: Module params
:rtype: A dict
:returns: dict with api returned value to module param value
"""
temp = {}
for k, v in iteritems(key_transform):
if v in module_return_params and (
module_return_params.get(v)
or module_return_params.get(v) == 0
or module_return_params.get(v) is False
):
temp[k] = module_return_params.pop(v)
return temp
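# Illustrative round-trip (hypothetical keys): with
# kt = {"alert.suppress": "suppress_alerts"},
# map_params_to_obj({"alert.suppress": True}, kt) -> {"suppress_alerts": True}
# and map_obj_to_params({"suppress_alerts": True}, kt) -> {"alert.suppress": True}.
# Falsy values other than 0 and False are skipped by both functions.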
def set_defaults(config, defaults):
for k, v in defaults.items():
config.setdefault(k, v)
return config
class SplunkRequest(object):
# TODO: There is a ton of code only present to make sure the legacy modules
# work as intended. Once the modules are deprecated and no longer receive
# support, this object needs to be rewritten.
def __init__(
self,
module=None,
headers=None,
action_module=None, # needs to be dealt with after end of support
connection=None,
keymap=None,
not_rest_data_keys=None,
        # The legacy modules had a partial implementation of keymap, where the data
        # passed to 'create_update' would be completely overwritten and replaced by
        # the output of the 'get_data' function. This flag ensures that modules that
        # hadn't yet been updated to use the keymap can continue to work as
        # originally intended
override=True,
):
# check if call being made by legacy module (passes 'module' param)
self.module = module
if module:
# This will be removed, once all of the available modules
# are moved to use action plugin design, as otherwise test
# would start to complain without the implementation.
self.connection = Connection(self.module._socket_path)
self.legacy = True
elif connection:
self.connection = connection
try:
self.connection.load_platform_plugins("splunk.es.splunk")
self.module = action_module
self.legacy = False
except ConnectionError:
raise
# The Splunk REST API endpoints often use keys that aren't pythonic so
# we need to handle that with a mapping to allow keys to be proper
# variables in the module argspec
if keymap is None:
self.keymap = {}
else:
self.keymap = keymap
        # Select whether the payload passed to create_update is overridden or not
self.override = override
# This allows us to exclude specific argspec keys from being included by
# the rest data that don't follow the splunk_* naming convention
if not_rest_data_keys is None:
self.not_rest_data_keys = []
else:
self.not_rest_data_keys = not_rest_data_keys
self.not_rest_data_keys.append("validate_certs")
def _httpapi_error_handle(self, method, uri, payload=None):
try:
code, response = self.connection.send_request(
method, uri, payload=payload
)
if code == 404:
if to_text("Object not found") in to_text(response) or to_text(
"Could not find object"
) in to_text(response):
return {}
if not (code >= 200 and code < 300):
self.module.fail_json(
msg="Splunk httpapi returned error {0} with message {1}".format(
code, response
),
)
return response
except ConnectionError as e:
self.module.fail_json(
msg="connection error occurred: {0}".format(e),
)
except CertificateError as e:
self.module.fail_json(
msg="certificate error occurred: {0}".format(e),
)
except ValueError as e:
try:
self.module.fail_json(
msg="certificate not found: {0}".format(e)
)
except AttributeError:
pass
def get(self, url, **kwargs):
return self._httpapi_error_handle("GET", url, **kwargs)
def put(self, url, **kwargs):
return self._httpapi_error_handle("PUT", url, **kwargs)
def post(self, url, **kwargs):
return self._httpapi_error_handle("POST", url, **kwargs)
def delete(self, url, **kwargs):
return self._httpapi_error_handle("DELETE", url, **kwargs)
def get_data(self, config=None):
"""
Get the valid fields that should be passed to the REST API as urlencoded
data so long as the argument specification to the module follows the
convention:
- the key to the argspec item does not start with splunk_
- the key does not exist in the not_data_keys list
"""
try:
splunk_data = {}
if self.legacy and not config:
config = self.module.params
for param in config:
if (config[param]) is not None and (
param not in self.not_rest_data_keys
):
if param in self.keymap:
splunk_data[self.keymap[param]] = config[param]
else:
splunk_data[param] = config[param]
return splunk_data
except TypeError as e:
self.module.fail_json(
msg="invalid data type provided: {0}".format(e)
)
def get_urlencoded_data(self, config):
return urlencode(self.get_data(config))
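    # Illustrative example (hypothetical config): with keymap = {"crc_salt": "crc-salt"}
    # and not_rest_data_keys = ["state"], get_urlencoded_data(
    # {"crc_salt": "<SOURCE>", "state": "present"}) returns "crc-salt=%3CSOURCE%3E".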
def get_by_path(self, rest_path):
"""
GET attributes of a monitor by rest path
"""
return self.get("/{0}?output_mode=json".format(rest_path))
def delete_by_path(self, rest_path):
"""
DELETE attributes of a monitor by rest path
"""
return self.delete("/{0}?output_mode=json".format(rest_path))
def create_update(self, rest_path, data):
"""
Create or Update a file/directory monitor data input in Splunk
"""
# when 'self.override' is True, the 'get_data' function replaces 'data'
# in order to make use of keymap
if data is not None and self.override:
data = self.get_urlencoded_data(data)
return self.post(
"/{0}?output_mode=json".format(rest_path), payload=data
)


@@ -0,0 +1,462 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: adaptive_response_notable_event
short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
description:
- This module allows for creation, deletion, and modification of Splunk
Enterprise Security Notable Event Adaptive Responses that are associated
with a correlation search
version_added: "1.0.0"
deprecated:
alternative: splunk_adaptive_response_notable_events
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
name:
description:
- Name of notable event
required: true
type: str
correlation_search_name:
description:
- Name of correlation search to associate this notable event adaptive response with
required: true
type: str
description:
description:
      - Description of the notable event; this will populate the description field in the web console
required: true
type: str
state:
description:
- Add or remove a data source.
required: true
choices: [ "present", "absent" ]
type: str
security_domain:
description:
- Splunk Security Domain
type: str
required: False
choices:
- "access"
- "endpoint"
- "network"
- "threat"
- "identity"
- "audit"
default: "threat"
severity:
description:
- Severity rating
type: str
required: False
choices:
- "informational"
- "low"
- "medium"
- "high"
- "critical"
- "unknown"
default: "high"
default_owner:
description:
      - Default owner of the notable event; if unset it will default to Splunk System Defaults
type: str
required: False
default_status:
description:
      - Default status of the notable event; if unset it will default to Splunk System Defaults
type: str
required: False
choices:
- "unassigned"
- "new"
- "in progress"
- "pending"
- "resolved"
- "closed"
drill_down_name:
description:
      - Name for drill down search; supports variable substitution with fields from the matching event.
type: str
required: False
drill_down_search:
description:
      - Drill down search; supports variable substitution with fields from the matching event.
type: str
required: False
drill_down_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
to match the earliest time of the search
type: str
required: False
default: \"$info_min_time$\"
drill_down_latest_offset:
description:
- Set the amount of time after the triggering event to search for related
events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
time to match the latest time of the search
type: str
required: False
default: \"$info_max_time$\"
investigation_profiles:
description:
      - Investigation profile to associate the notable event with.
type: str
required: False
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
required: False
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
- Identifying Recommended Adaptive Responses will highlight those actions
for the analyst when looking at the list of response actions available,
making it easier to find them among the longer list of available actions.
type: list
elements: str
required: False
asset_extraction:
description:
      - list of assets to extract; select any one or many of the available choices
- defaults to all available choices
type: list
elements: str
choices:
- src
- dest
- dvc
- orig_host
default:
- src
- dest
- dvc
- orig_host
required: False
identity_extraction:
description:
      - list of identity fields to extract; select any one or many of the available choices
- defaults to all available choices
type: list
elements: str
choices:
- user
- src_user
default:
- user
- src_user
required: False
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of using splunk.es.adaptive_response_notable_event module
splunk.es.adaptive_response_notable_event:
name: "Example notable event from Ansible"
correlation_search_name: "Example Correlation Search From Ansible"
description: "Example notable event from Ansible, description."
state: "present"
next_steps:
- ping
- nslookup
recommended_actions:
- script
- ansiblesecurityautomation
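# A hypothetical removal task (illustrative sketch; the required name,
# correlation_search_name, and description values reuse the ones above):
- name: Example of removing the notable event adaptive response
  splunk.es.adaptive_response_notable_event:
    name: "Example notable event from Ansible"
    correlation_search_name: "Example Correlation Search From Ansible"
    description: "Example notable event from Ansible, description."
    state: "absent"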
"""
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
name=dict(required=True, type="str"),
correlation_search_name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
security_domain=dict(
choices=[
"access",
"endpoint",
"network",
"threat",
"identity",
"audit",
],
required=False,
default="threat",
),
severity=dict(
choices=[
"informational",
"low",
"medium",
"high",
"critical",
"unknown",
],
required=False,
default="high",
),
default_owner=dict(required=False, type="str"),
default_status=dict(
choices=[
"unassigned",
"new",
"in progress",
"pending",
"resolved",
"closed",
],
required=False,
),
drill_down_name=dict(required=False, type="str"),
drill_down_search=dict(required=False, type="str"),
drill_down_earliest_offset=dict(
required=False, type="str", default="$info_min_time$"
),
drill_down_latest_offset=dict(
required=False, type="str", default="$info_max_time$"
),
investigation_profiles=dict(required=False, type="str"),
next_steps=dict(
required=False, type="list", elements="str", default=[]
),
recommended_actions=dict(
required=False, type="list", elements="str", default=[]
),
asset_extraction=dict(
required=False,
type="list",
elements="str",
default=["src", "dest", "dvc", "orig_host"],
choices=["src", "dest", "dvc", "orig_host"],
),
identity_extraction=dict(
required=False,
type="list",
elements="str",
default=["user", "src_user"],
choices=["user", "src_user"],
),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
splunk_request = SplunkRequest(
module,
override=False,
headers={"Content-Type": "application/x-www-form-urlencoded"},
not_rest_data_keys=["state"],
)
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["correlation_search_name"])
)
)
# Have to custom craft the data here because they overload the saved searches
# endpoint in the rest api and we want to hide the nuance from the user
request_post_data = {}
# FIXME need to figure out how to properly support these, the possible values appear to
# be dynamically created based on what the search is indexing
# request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]'
# request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
if module.params["next_steps"]:
if len(module.params["next_steps"]) == 1:
next_steps = "[[action|{0}]]".format(
module.params["next_steps"][0]
)
else:
next_steps = ""
for next_step in module.params["next_steps"]:
if next_steps:
next_steps += "\n[[action|{0}]]".format(next_step)
else:
next_steps = "[[action|{0}]]".format(next_step)
# NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
next_steps_dict = {"version": 1, "data": next_steps}
request_post_data["action.notable.param.next_steps"] = json.dumps(
next_steps_dict
)
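        # Illustrative sketch: for next_steps=["ping", "nslookup"] the loop
        # above builds "[[action|ping]]\n[[action|nslookup]]", so the posted
        # value is '{"version": 1, "data": "[[action|ping]]\\n[[action|nslookup]]"}'.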
if module.params["recommended_actions"]:
if len(module.params["recommended_actions"]) == 1:
request_post_data[
"action.notable.param.recommended_actions"
] = module.params["recommended_actions"][0]
else:
request_post_data[
"action.notable.param.recommended_actions"
] = ",".join(module.params["recommended_actions"])
request_post_data["action.notable.param.rule_description"] = module.params[
"description"
]
request_post_data["action.notable.param.rule_title"] = module.params[
"name"
]
request_post_data["action.notable.param.security_domain"] = module.params[
"security_domain"
]
request_post_data["action.notable.param.severity"] = module.params[
"severity"
]
request_post_data["action.notable.param.asset_extraction"] = module.params[
"asset_extraction"
]
request_post_data[
"action.notable.param.identity_extraction"
] = module.params["identity_extraction"]
# NOTE: this field appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
request_post_data["action.notable.param.verbose"] = "0"
if module.params["default_owner"]:
request_post_data[
"action.notable.param.default_owner"
] = module.params["default_owner"]
if module.params["default_status"]:
request_post_data[
"action.notable.param.default_status"
] = module.params["default_status"]
request_post_data = utils.remove_empties(request_post_data)
if query_dict:
request_post_data["search"] = query_dict["entry"][0]["content"][
"search"
]
if "actions" in query_dict["entry"][0]["content"]:
if query_dict["entry"][0]["content"]["actions"] == "notable":
pass
elif (
len(query_dict["entry"][0]["content"]["actions"].split(","))
> 0
and "notable"
not in query_dict["entry"][0]["content"]["actions"]
):
request_post_data["actions"] = (
query_dict["entry"][0]["content"]["actions"] + ", notable"
)
else:
request_post_data["actions"] = "notable"
else:
module.fail_json(
msg="Unable to find correlation search: {0}",
splunk_data=query_dict,
)
if module.params["state"] == "present":
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
request_post_data[arg]
):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["correlation_search_name"])
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
msg="{0} updated.".format(
module.params["correlation_search_name"]
),
splunk_data=splunk_data,
)
if module.params["state"] == "absent":
# FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
            splunk_data=query_dict,
)
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
needs_change = True
del query_dict["entry"][0]["content"][arg]
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["correlation_search_name"])
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
msg="{0} updated.".format(
module.params["correlation_search_name"]
),
splunk_data=splunk_data,
)
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,376 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: correlation_search
short_description: Manage Splunk Enterprise Security Correlation Searches
description:
- This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches
version_added: "1.0.0"
deprecated:
alternative: splunk_correlation_searches
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
name:
description:
      - Name of correlation search
required: True
type: str
description:
description:
      - Description of the correlation search; this will populate the description field for the web console
required: True
type: str
state:
description:
      - Add, remove, enable, or disable a correlation search.
required: True
choices: [ "present", "absent", "enabled", "disabled" ]
type: str
search:
description:
- SPL search string
type: str
required: True
app:
description:
      - Splunk app to associate the correlation search with
type: str
required: False
default: "SplunkEnterpriseSecuritySuite"
ui_dispatch_context:
description:
- Set an app to use for links such as the drill-down search in a notable
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
required: False
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
required: False
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
required: False
default: "now"
cron_schedule:
description:
- Enter a cron-style schedule.
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
required: False
default: "*/5 * * * *"
scheduling:
description:
- Controls the way the scheduler computes the next execution time of a scheduled search.
- >
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
required: False
default: "real-time"
choices:
- "real-time"
- "continuous"
schedule_window:
description:
      - Lets the report run at any time within a window that opens at its scheduled run time,
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
required: False
default: "0"
schedule_priority:
description:
- Raise the scheduling priority of a report. Set to "Higher" to prioritize
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
required: False
default: "Default"
choices:
- "Default"
- "Higher"
- "Highest"
trigger_alert_when:
description:
      - The type of result count the alert trigger evaluates, such as the
        number of events, results, hosts, or sources returned by the search.
type: str
required: False
default: "number of events"
choices:
- "number of events"
- "number of results"
- "number of hosts"
- "number of sources"
trigger_alert_when_condition:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
required: False
default: "greater than"
choices:
- "greater than"
- "less than"
- "equal to"
- "not equal to"
- "drops by"
- "rises by"
trigger_alert_when_value:
description:
- Value to pass to C(trigger_alert_when)
type: str
required: False
default: "10"
throttle_window_duration:
description:
- "How much time to ignore other events that match the field values specified in Fields to group by."
type: str
required: False
throttle_fields_to_group_by:
description:
- "Type the fields to consider for matching events for throttling."
type: str
required: False
suppress_alerts:
description:
- "To suppress alerts from this correlation search or not"
type: bool
required: False
default: False
notes:
- >
The following options are not yet supported:
throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of creating a correlation search with splunk.es.correlation_search
  splunk.es.correlation_search:
    name: "Example Correlation Search From Ansible"
    description: "Example Correlation Search From Ansible, description."
search: 'source="/var/log/snort.log"'
state: "present"
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
state=dict(
choices=["present", "absent", "enabled", "disabled"], required=True
),
search=dict(required=True, type="str"),
app=dict(
type="str", required=False, default="SplunkEnterpriseSecuritySuite"
),
ui_dispatch_context=dict(type="str", required=False),
time_earliest=dict(type="str", required=False, default="-24h"),
time_latest=dict(type="str", required=False, default="now"),
cron_schedule=dict(type="str", required=False, default="*/5 * * * *"),
scheduling=dict(
type="str",
required=False,
default="real-time",
choices=["real-time", "continuous"],
),
schedule_window=dict(type="str", required=False, default="0"),
schedule_priority=dict(
type="str",
required=False,
default="Default",
choices=["Default", "Higher", "Highest"],
),
trigger_alert_when=dict(
type="str",
required=False,
default="number of events",
choices=[
"number of events",
"number of results",
"number of hosts",
"number of sources",
],
),
trigger_alert_when_condition=dict(
type="str",
required=False,
default="greater than",
choices=[
"greater than",
"less than",
"equal to",
"not equal to",
"drops by",
"rises by",
],
),
trigger_alert_when_value=dict(
type="str", required=False, default="10"
),
throttle_window_duration=dict(type="str", required=False),
throttle_fields_to_group_by=dict(type="str", required=False),
suppress_alerts=dict(type="bool", required=False, default=False),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
if module.params["state"] in ["present", "enabled"]:
module_disabled_state = False
else:
module_disabled_state = True
splunk_request = SplunkRequest(
module,
override=False,
headers={"Content-Type": "application/x-www-form-urlencoded"},
not_rest_data_keys=["state"],
)
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["name"])
)
)
except HTTPError as e:
        # the correlation search doesn't exist
query_dict = {}
# Have to custom craft the data here because they overload the saved searches
# endpoint in the rest api and we want to hide the nuance from the user
request_post_data = {}
request_post_data["name"] = module.params["name"]
request_post_data["action.correlationsearch.enabled"] = "1"
request_post_data["is_scheduled"] = True
request_post_data["dispatch.rt_backfill"] = True
request_post_data["action.correlationsearch.label"] = module.params["name"]
request_post_data["description"] = module.params["description"]
request_post_data["search"] = module.params["search"]
request_post_data["request.ui_dispatch_app"] = module.params["app"]
if module.params["ui_dispatch_context"]:
request_post_data["request.ui_dispatch_context"] = module.params[
"ui_dispatch_context"
]
request_post_data["dispatch.earliest_time"] = module.params[
"time_earliest"
]
request_post_data["dispatch.latest_time"] = module.params["time_latest"]
request_post_data["cron_schedule"] = module.params["cron_schedule"]
if module.params["scheduling"] == "real-time":
request_post_data["realtime_schedule"] = True
else:
request_post_data["realtime_schedule"] = False
request_post_data["schedule_window"] = module.params["schedule_window"]
request_post_data["schedule_priority"] = module.params[
"schedule_priority"
].lower()
request_post_data["alert_type"] = module.params["trigger_alert_when"]
request_post_data["alert_comparator"] = module.params[
"trigger_alert_when_condition"
]
request_post_data["alert_threshold"] = module.params[
"trigger_alert_when_value"
]
request_post_data["alert.suppress"] = module.params["suppress_alerts"]
request_post_data["disabled"] = module_disabled_state
request_post_data = utils.remove_empties(request_post_data)
if module.params["state"] in ["present", "enabled", "disabled"]:
if query_dict:
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(
query_dict["entry"][0]["content"][arg]
) != to_text(request_post_data[arg]):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
# FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled
del request_post_data[
"name"
                ] # If this is present, splunk assumes we're trying to create a new one with the same name
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["name"])
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True, msg="{0} updated.", splunk_data=splunk_data
)
else:
# Create it
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
data=urlencode(request_post_data),
)
module.exit_json(
changed=True, msg="{0} created.", splunk_data=splunk_data
)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"services/saved/searches/{0}".format(
quote_plus(module.params["name"])
)
)
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
splunk_data=splunk_data,
)
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,80 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: correlation_search_info
short_description: Manage Splunk Enterprise Security Correlation Searches
description:
- This module allows for the query of Splunk Enterprise Security Correlation Searches
version_added: "1.0.0"
options:
name:
description:
      - Name of correlation search
required: false
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example usage of splunk.es.correlation_search_info
splunk.es.correlation_search_info:
name: "Name of correlation search"
  register: correlation_search_info
- name: debug display information gathered
debug:
    var: correlation_search_info
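# Omitting name returns all correlation searches (illustrative sketch):
- name: Example of gathering all correlation searches
  splunk.es.correlation_search_info:
  register: all_correlation_searches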
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(name=dict(required=False, type="str"))
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
splunk_request = SplunkRequest(
module,
headers={"Content-Type": "application/json"},
)
if module.params["name"]:
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["name"])
)
)
except HTTPError as e:
            # the correlation search doesn't exist
query_dict = {}
else:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
)
module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,264 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: data_input_monitor
short_description: Manage Splunk Data Inputs of type Monitor
description:
- This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk.
version_added: "1.0.0"
deprecated:
alternative: splunk_data_inputs_monitor
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
name:
description:
- The file or directory path to monitor on the system.
required: True
type: str
state:
description:
- Add or remove a data source.
required: True
choices:
- "present"
- "absent"
type: str
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
required: False
type: str
check_index:
description:
- If set to C(True), the index value is checked to ensure that it is the name of a valid index.
required: False
type: bool
default: False
check_path:
description:
- If set to C(True), the name value is checked to ensure that it exists.
required: False
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
required: False
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
required: False
default: False
type: bool
followTail:
description:
      - If set to C(True), files that are seen for the first time are read from the end.
required: False
type: bool
default: False
host:
description:
- The value to populate in the host field for events from this data input.
required: False
type: str
host_regex:
description:
- Specify a regular expression for a file path. If the path for a file
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
required: False
type: str
host_segment:
description:
      - Use the specified slash-separated segment of the filepath as the host field value.
required: False
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
        falls outside of this rolling time window, the file is no longer monitored.
required: False
type: str
index:
description:
      - Which index events from this input should be stored in. Defaults to C(default).
required: False
type: str
recursive:
description:
- Setting this to False prevents monitoring of any subdirectories encountered within this data input.
required: False
type: bool
default: False
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
required: False
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
required: False
type: str
time_before_close:
description:
- When Splunk software reaches the end of a file that is being read, the
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
required: False
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
required: False
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
EXAMPLES = """
- name: Example adding data input monitor with splunk.es.data_input_monitor
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
recursive: True
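# A hypothetical variant exercising more of the documented options
# (values are illustrative only):
- name: Example adding a monitor with index, sourcetype, and blacklist
  splunk.es.data_input_monitor:
    name: "/var/log/example2.log"
    state: "present"
    index: "main"
    sourcetype: "example_sourcetype"
    blacklist: '\.gz$'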
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
name=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
blacklist=dict(required=False, type="str", default=None),
check_index=dict(required=False, type="bool", default=False),
check_path=dict(required=False, type="bool", default=None),
crc_salt=dict(required=False, type="str", default=None),
disabled=dict(required=False, type="bool", default=False),
followTail=dict(required=False, type="bool", default=False),
host=dict(required=False, type="str", default=None),
host_segment=dict(required=False, type="int", default=None),
host_regex=dict(required=False, type="str", default=None),
ignore_older_than=dict(required=False, type="str", default=None),
index=dict(required=False, type="str", default=None),
recursive=dict(required=False, type="bool", default=False),
rename_source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
time_before_close=dict(required=False, type="int", default=None),
whitelist=dict(required=False, type="str", default=None),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
# map of keys for the splunk REST API that aren't pythonic so we have to
# handle the substitutes
keymap = {
"check_index": "check-index",
"check_path": "check-path",
"crc_salt": "crc-salt",
"ignore_older_than": "ignore-older-than",
"rename_source": "rename-source",
"time_before_close": "time-before-close",
}
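    # Illustrative sketch: with the keymap above, module params such as
    # {"check_index": True, "crc_salt": "<SOURCE>"} are urlencoded using the
    # dashed forms Splunk expects, e.g. "check-index=True&crc-salt=%3CSOURCE%3E".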
splunk_request = SplunkRequest(
module,
headers={"Content-Type": "application/x-www-form-urlencoded"},
keymap=keymap,
not_rest_data_keys=["state"],
)
# This is where the splunk_* args are processed
request_data = splunk_request.get_data()
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
quote_plus(module.params["name"])
)
)
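    # quote_plus() percent-encodes the monitor path, so for the EXAMPLES task
    # above the GET targets (illustrative):
    #   servicesNS/nobody/search/data/inputs/monitor/%2Fvar%2Flog%2Fexample.log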
query_dict = utils.remove_empties(query_dict)
if module.params["state"] == "present":
if query_dict:
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(
query_dict["entry"][0]["content"][arg]
) != to_text(request_data[arg]):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
quote_plus(module.params["name"])
)
)
module.exit_json(
changed=True, msg="{0} updated.", splunk_data=splunk_data
)
else:
# Create it
_data = splunk_request.get_data()
_data["name"] = module.params["name"]
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor",
data=_data,
)
module.exit_json(
changed=True, msg="{0} created.", splunk_data=splunk_data
)
if module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
quote_plus(module.params["name"])
)
)
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
splunk_data=splunk_data,
)
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,276 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: data_input_network
short_description: Manage Splunk Data Inputs of type TCP or UDP
description:
- This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk.
version_added: "1.0.0"
deprecated:
alternative: splunk_data_inputs_network
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
protocol:
description:
- Choose between tcp or udp
required: True
choices:
- 'tcp'
- 'udp'
type: str
connection_host:
description:
- Set the host for the remote server that is sending data.
- C(ip) sets the host to the IP address of the remote server sending data.
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
default: "ip"
required: False
type: str
choices:
- "ip"
- "dns"
- "none"
state:
description:
- Enable, disable, create, or destroy
choices:
- "present"
- "absent"
- "enabled"
- "disable"
required: False
default: "present"
type: str
datatype:
description: >
Forwarders can transmit three types of data: raw, unparsed, or parsed.
C(cooked) data refers to parsed and unparsed formats.
choices:
- "cooked"
- "raw"
default: "raw"
required: False
type: str
host:
description:
- Host from which the indexer gets data.
required: False
type: str
index:
description:
      - Default index to store generated events.
type: str
name:
description:
- The input port which receives raw data.
required: True
type: str
queue:
description:
- Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
- Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
the online documentation at "Monitor files and directories with inputs.conf"
- Set queue to indexQueue to send your data directly into the index.
choices:
- "parsingQueue"
- "indexQueue"
type: str
required: False
default: "parsingQueue"
rawTcpDoneTimeout:
description:
- Specifies in seconds the timeout value for adding a Done-key.
      - If a connection over the port specified by name remains idle after receiving data for the specified
number of seconds, it adds a Done-key. This implies the last event is completely received.
default: 10
type: int
required: False
restrictToHost:
description:
- Allows for restricting this input to only accept data from the host specified here.
required: False
type: str
ssl:
description:
      - Enable or disable SSL for the data stream
required: False
type: bool
source:
description:
- Sets the source key/field for events from this input. Defaults to the input file path.
- >
Sets the source key initial value. The key is used during parsing/indexing, in particular to set
the source field during indexing. It is also the source field used at search time. As a convenience,
the chosen string is prepended with 'source::'.
- >
Note: Overriding the source key is generally not recommended. Typically, the input layer provides a
more accurate string to aid in problem analysis and investigation, accurately recording the file from
which the data was retrieved. Consider use of source types, tagging, and search wildcards before
overriding this value.
type: str
sourcetype:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
EXAMPLES = """
- name: Example adding data input network with splunk.es.data_input_network
splunk.es.data_input_network:
name: "8099"
protocol: "tcp"
state: "present"
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
state=dict(
required=False,
choices=["present", "absent", "enabled", "disable"],
default="present",
type="str",
),
connection_host=dict(
required=False,
choices=["ip", "dns", "none"],
default="ip",
type="str",
),
host=dict(required=False, type="str", default=None),
index=dict(required=False, type="str", default=None),
name=dict(required=True, type="str"),
protocol=dict(required=True, type="str", choices=["tcp", "udp"]),
queue=dict(
required=False,
type="str",
choices=["parsingQueue", "indexQueue"],
default="parsingQueue",
),
rawTcpDoneTimeout=dict(required=False, type="int", default=10),
restrictToHost=dict(required=False, type="str", default=None),
ssl=dict(required=False, type="bool", default=None),
source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
datatype=dict(
required=False, choices=["cooked", "raw"], default="raw"
),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
splunk_request = SplunkRequest(
module,
headers={"Content-Type": "application/x-www-form-urlencoded"},
not_rest_data_keys=["state", "datatype", "protocol"],
)
# This is where the splunk_* args are processed
request_data = splunk_request.get_data()
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
)
)
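    # Illustrative sketch: for the EXAMPLES task above (protocol=tcp,
    # name=8099, datatype defaulting to raw) the path queried is
    #   servicesNS/nobody/search/data/inputs/tcp/raw/8099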
if module.params["state"] in ["present", "enabled", "disabled"]:
_data = splunk_request.get_data()
if module.params["state"] in ["present", "enabled"]:
_data["disabled"] = False
else:
_data["disabled"] = True
if query_dict:
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(
query_dict["entry"][0]["content"][arg]
) != to_text(request_data[arg]):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
),
data=_data,
)
if module.params["state"] in ["present", "enabled"]:
module.exit_json(
changed=True, msg="{0} updated.", splunk_data=splunk_data
)
else:
module.exit_json(
changed=True, msg="{0} disabled.", splunk_data=splunk_data
)
else:
# Create it
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/{0}/{1}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
),
data=_data,
)
module.exit_json(
changed=True, msg="{0} created.", splunk_data=splunk_data
)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
)
)
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
splunk_data=splunk_data,
)
module.exit_json(changed=False, msg="Nothing to do.", splunk_data={})
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,462 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: adaptive_response_notable_event
short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
description:
- This module allows for creation, deletion, and modification of Splunk
Enterprise Security Notable Event Adaptive Responses that are associated
with a correlation search
version_added: "1.0.0"
deprecated:
alternative: splunk_adaptive_response_notable_events
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
name:
description:
- Name of notable event
required: true
type: str
correlation_search_name:
description:
- Name of correlation search to associate this notable event adaptive response with
required: true
type: str
description:
description:
      - Description of the notable event; this will populate the description field for the web console
required: true
type: str
state:
description:
      - Add or remove the notable event adaptive response.
required: true
choices: [ "present", "absent" ]
type: str
security_domain:
description:
- Splunk Security Domain
type: str
required: False
choices:
- "access"
- "endpoint"
- "network"
- "threat"
- "identity"
- "audit"
default: "threat"
severity:
description:
- Severity rating
type: str
required: False
choices:
- "informational"
- "low"
- "medium"
- "high"
- "critical"
- "unknown"
default: "high"
default_owner:
description:
      - Default owner of the notable event; if unset it will default to Splunk System Defaults
type: str
required: False
default_status:
description:
      - Default status of the notable event; if unset it will default to Splunk System Defaults
type: str
required: False
choices:
- "unassigned"
- "new"
- "in progress"
- "pending"
- "resolved"
- "closed"
drill_down_name:
description:
      - Name for drill down search; supports variable substitution with fields from the matching event.
type: str
required: False
drill_down_search:
description:
      - Drill down search; supports variable substitution with fields from the matching event.
type: str
required: False
drill_down_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
to match the earliest time of the search
type: str
required: False
default: \"$info_min_time$\"
drill_down_latest_offset:
description:
- Set the amount of time after the triggering event to search for related
events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
time to match the latest time of the search
type: str
required: False
default: \"$info_max_time$\"
investigation_profiles:
description:
      - Investigation profile to associate the notable event with.
type: str
required: False
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
required: False
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
- Identifying Recommended Adaptive Responses will highlight those actions
for the analyst when looking at the list of response actions available,
making it easier to find them among the longer list of available actions.
type: list
elements: str
required: False
asset_extraction:
description:
      - list of assets to extract; select any one or many of the available choices
- defaults to all available choices
type: list
elements: str
choices:
- src
- dest
- dvc
- orig_host
default:
- src
- dest
- dvc
- orig_host
required: False
identity_extraction:
description:
      - list of identity fields to extract; select any one or many of the available choices
- defaults to all available choices
type: list
elements: str
choices:
- user
- src_user
default:
- user
- src_user
required: False
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of using splunk.es.adaptive_response_notable_event module
splunk.es.adaptive_response_notable_event:
name: "Example notable event from Ansible"
correlation_search_name: "Example Correlation Search From Ansible"
description: "Example notable event from Ansible, description."
state: "present"
next_steps:
- ping
- nslookup
recommended_actions:
- script
- ansiblesecurityautomation
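# A hypothetical removal task (illustrative sketch; the required name,
# correlation_search_name, and description values reuse the ones above):
- name: Example of removing the notable event adaptive response
  splunk.es.adaptive_response_notable_event:
    name: "Example notable event from Ansible"
    correlation_search_name: "Example Correlation Search From Ansible"
    description: "Example notable event from Ansible, description."
    state: "absent"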
"""
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
name=dict(required=True, type="str"),
correlation_search_name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
security_domain=dict(
choices=[
"access",
"endpoint",
"network",
"threat",
"identity",
"audit",
],
required=False,
default="threat",
),
severity=dict(
choices=[
"informational",
"low",
"medium",
"high",
"critical",
"unknown",
],
required=False,
default="high",
),
default_owner=dict(required=False, type="str"),
default_status=dict(
choices=[
"unassigned",
"new",
"in progress",
"pending",
"resolved",
"closed",
],
required=False,
),
drill_down_name=dict(required=False, type="str"),
drill_down_search=dict(required=False, type="str"),
drill_down_earliest_offset=dict(
required=False, type="str", default="$info_min_time$"
),
drill_down_latest_offset=dict(
required=False, type="str", default="$info_max_time$"
),
investigation_profiles=dict(required=False, type="str"),
next_steps=dict(
required=False, type="list", elements="str", default=[]
),
recommended_actions=dict(
required=False, type="list", elements="str", default=[]
),
asset_extraction=dict(
required=False,
type="list",
elements="str",
default=["src", "dest", "dvc", "orig_host"],
choices=["src", "dest", "dvc", "orig_host"],
),
identity_extraction=dict(
required=False,
type="list",
elements="str",
default=["user", "src_user"],
choices=["user", "src_user"],
),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
splunk_request = SplunkRequest(
module,
override=False,
headers={"Content-Type": "application/x-www-form-urlencoded"},
not_rest_data_keys=["state"],
)
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["correlation_search_name"])
)
)
# Have to custom craft the data here because they overload the saved searches
# endpoint in the rest api and we want to hide the nuance from the user
request_post_data = {}
# FIXME need to figure out how to properly support these, the possible values appear to
# be dynamically created based on what the search is indexing
# request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]'
# request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
if module.params["next_steps"]:
if len(module.params["next_steps"]) == 1:
next_steps = "[[action|{0}]]".format(
module.params["next_steps"][0]
)
else:
next_steps = ""
for next_step in module.params["next_steps"]:
if next_steps:
next_steps += "\n[[action|{0}]]".format(next_step)
else:
next_steps = "[[action|{0}]]".format(next_step)
# NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
next_steps_dict = {"version": 1, "data": next_steps}
request_post_data["action.notable.param.next_steps"] = json.dumps(
next_steps_dict
)
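        # Illustrative sketch: for next_steps=["ping", "nslookup"] the loop
        # above builds "[[action|ping]]\n[[action|nslookup]]", so the posted
        # value is '{"version": 1, "data": "[[action|ping]]\\n[[action|nslookup]]"}'.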
if module.params["recommended_actions"]:
if len(module.params["recommended_actions"]) == 1:
request_post_data[
"action.notable.param.recommended_actions"
] = module.params["recommended_actions"][0]
else:
request_post_data[
"action.notable.param.recommended_actions"
] = ",".join(module.params["recommended_actions"])
request_post_data["action.notable.param.rule_description"] = module.params[
"description"
]
request_post_data["action.notable.param.rule_title"] = module.params[
"name"
]
request_post_data["action.notable.param.security_domain"] = module.params[
"security_domain"
]
request_post_data["action.notable.param.severity"] = module.params[
"severity"
]
request_post_data["action.notable.param.asset_extraction"] = module.params[
"asset_extraction"
]
request_post_data[
"action.notable.param.identity_extraction"
] = module.params["identity_extraction"]
# NOTE: this field appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
request_post_data["action.notable.param.verbose"] = "0"
if module.params["default_owner"]:
request_post_data[
"action.notable.param.default_owner"
] = module.params["default_owner"]
if module.params["default_status"]:
request_post_data[
"action.notable.param.default_status"
] = module.params["default_status"]
request_post_data = utils.remove_empties(request_post_data)
if query_dict:
request_post_data["search"] = query_dict["entry"][0]["content"][
"search"
]
if "actions" in query_dict["entry"][0]["content"]:
if query_dict["entry"][0]["content"]["actions"] == "notable":
pass
elif (
len(query_dict["entry"][0]["content"]["actions"].split(","))
> 0
and "notable"
not in query_dict["entry"][0]["content"]["actions"]
):
request_post_data["actions"] = (
query_dict["entry"][0]["content"]["actions"] + ", notable"
)
else:
request_post_data["actions"] = "notable"
else:
module.fail_json(
msg="Unable to find correlation search: {0}",
splunk_data=query_dict,
)
if module.params["state"] == "present":
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
request_post_data[arg]
):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["correlation_search_name"])
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
msg="{0} updated.".format(
module.params["correlation_search_name"]
),
splunk_data=splunk_data,
)
if module.params["state"] == "absent":
# FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
            splunk_data=query_dict,
)
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
needs_change = True
del query_dict["entry"][0]["content"][arg]
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["correlation_search_name"])
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
msg="{0} updated.".format(
module.params["correlation_search_name"]
),
splunk_data=splunk_data,
)
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,512 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2022 Red Hat
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: splunk_adaptive_response_notable_events
short_description: Manage Adaptive Responses notable events resource module
description:
- This module allows for creation, deletion, and modification of Splunk
Enterprise Security Notable Event Adaptive Responses that are associated
with a correlation search
- Tested against Splunk Enterprise Server 8.2.3
version_added: "2.1.0"
options:
config:
description:
      - Configure notable event adaptive responses on the system
type: list
elements: dict
suboptions:
name:
description:
- Name of notable event
type: str
correlation_search_name:
description:
- Name of correlation search to associate this notable event adaptive response with
required: true
type: str
description:
description:
          - Description of the notable event; this will populate the description field for the web console
type: str
security_domain:
description:
- Splunk Security Domain
type: str
choices:
- "access"
- "endpoint"
- "network"
- "threat"
- "identity"
- "audit"
default: "threat"
severity:
description:
- Severity rating
type: str
choices:
- "informational"
- "low"
- "medium"
- "high"
- "critical"
- "unknown"
default: "high"
default_owner:
description:
          - Default owner of the notable event; if unset it will default to Splunk System Defaults
type: str
default_status:
description:
          - Default status of the notable event; if unset it will default to Splunk System Defaults
type: str
choices:
- "unassigned"
- "new"
- "in progress"
- "pending"
- "resolved"
- "closed"
drilldown_name:
description:
          - Name for drill down search; supports variable substitution with fields from the matching event.
type: str
drilldown_search:
description:
          - Drill down search; supports variable substitution with fields from the matching event.
type: str
drilldown_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use '$info_min_time$' to set the drill-down time
to match the earliest time of the search
type: str
default: '$info_min_time$'
drilldown_latest_offset:
description:
- Set the amount of time after the triggering event to search for related
events. For example, 1m. Use '$info_max_time$' to set the drill-down
time to match the latest time of the search
type: str
default: '$info_max_time$'
investigation_profiles:
description:
- Investigation profile to associate the notable event with.
type: list
elements: str
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
- Identifying Recommended Adaptive Responses will highlight those actions
for the analyst when looking at the list of response actions available,
making it easier to find them among the longer list of available actions.
type: list
elements: str
extract_artifacts:
description:
- Assets and identities to be extracted
type: dict
suboptions:
asset:
description:
              - list of assets to extract; select any one or many of the available choices
- defaults to all available choices
type: list
elements: str
choices:
- src
- dest
- dvc
- orig_host
file:
description:
- list of files to extract
type: list
elements: str
identity:
description:
              - list of identity fields to extract; select any one or many of the available choices
- defaults to 'user' and 'src_user'
type: list
elements: str
choices:
- user
- src_user
- src_user_id
- user_id
- src_user_role
- user_role
- vendor_account
url:
description:
- list of URLs to extract
type: list
elements: str
running_config:
description:
- The module, by default, will connect to the remote device and retrieve the current
running-config to use as a base for comparing against the contents of source.
There are times when it is not desirable to have the task get the current running-config
for every task in a playbook. The I(running_config) argument allows the implementer
        to pass in the configuration to use as the base config for comparison. The
        value of this option should be the output received from the device by executing
        the command.
type: str
state:
description:
- The state the configuration should be left in
type: str
choices:
- merged
- replaced
- deleted
- gathered
default: merged
author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
"""
EXAMPLES = """
# Using gathered
# --------------
- name: Gather adaptive response notable events config
splunk.es.splunk_adaptive_response_notable_events:
config:
- correlation_search_name: Ansible Test
- correlation_search_name: Ansible Test 2
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "correlation_search_name": "Ansible Test",
# "description": "test notable event",
# "drilldown_earliest_offset": "$info_min_time$",
# "drilldown_latest_offset": "$info_max_time$",
# "drilldown_name": "test_drill_name",
# "drilldown_search": "test_drill",
# "extract_artifacts": {
# "asset": [
# "src",
# "dest",
# "dvc",
# "orig_host"
# ],
# "identity": [
# "src_user",
# "user",
# "src_user_id",
# "src_user_role",
# "user_id",
# "user_role",
# "vendor_account"
# ]
# },
# "investigation_profiles": [
# "test profile 1",
# "test profile 2",
# "test profile 3"
# ],
# "next_steps": [
# "makestreams",
# "nbtstat",
# "nslookup"
# ],
# "name": "ansible_test_notable",
# "recommended_actions": [
# "email",
# "logevent",
# "makestreams",
# "nbtstat"
# ],
# "security_domain": "threat",
# "severity": "high"
# },
# { } # there is no configuration associated with "Ansible Test 2"
# ]
# Using merged
# ------------
- name: Example to add config
splunk.es.splunk_adaptive_response_notable_events:
config:
- correlation_search_name: Ansible Test
description: test notable event
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
asset:
- src
- dest
identity:
- src_user
- user
- src_user_id
next_steps:
- makestreams
name: ansible_test_notable
recommended_actions:
- email
- logevent
security_domain: threat
severity: high
state: merged
# RUN output:
# -----------
# "after": [
# {
# "correlation_search_name": "Ansible Test",
# "description": "test notable event",
# "drilldown_earliest_offset": "$info_min_time$",
# "drilldown_latest_offset": "$info_max_time$",
# "drilldown_name": "test_drill_name",
# "drilldown_search": "test_drill",
# "extract_artifacts": {
# "asset": [
# "src",
# "dest",
# "dvc",
# "orig_host"
# ],
# "identity": [
# "src_user",
# "user",
# "src_user_id",
# "src_user_role",
# "user_id",
# "user_role",
# "vendor_account"
# ]
# },
# "investigation_profiles": [
# "test profile 1",
# "test profile 2",
# "test profile 3"
# ],
# "next_steps": [
# "makestreams",
# "nbtstat",
# "nslookup"
# ],
# "name": "ansible_test_notable",
# "recommended_actions": [
# "email",
# "logevent",
# "makestreams",
# "nbtstat"
# ],
# "security_domain": "threat",
# "severity": "high"
# }
# ],
# "before": [],
# Using replaced
# --------------
- name: Example to Replace the config
splunk.es.splunk_adaptive_response_notable_events:
config:
- correlation_search_name: Ansible Test
description: test notable event
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
asset:
- src
- dest
identity:
- src_user
- user
- src_user_id
next_steps:
- makestreams
name: ansible_test_notable
recommended_actions:
- email
- logevent
security_domain: threat
severity: high
state: replaced
# RUN output:
# -----------
# "after": [
# {
# "correlation_search_name": "Ansible Test",
# "description": "test notable event",
# "drilldown_earliest_offset": "$info_min_time$",
# "drilldown_latest_offset": "$info_max_time$",
# "extract_artifacts": {
# "asset": [
# "src",
# "dest"
# ],
# "identity": [
# "src_user",
# "user",
# "src_user_id"
# ]
# },
# "next_steps": [
# "makestreams"
# ],
# "name": "ansible_test_notable",
# "recommended_actions": [
# "email",
# "logevent"
# ],
# "security_domain": "threat",
# "severity": "high"
# }
# ],
# "before": [
# {
# "correlation_search_name": "Ansible Test",
# "description": "test notable event",
# "drilldown_earliest_offset": "$info_min_time$",
# "drilldown_latest_offset": "$info_max_time$",
# "drilldown_name": "test_drill_name",
# "drilldown_search": "test_drill",
# "extract_artifacts": {
# "asset": [
# "src",
# "dest",
# "dvc",
# "orig_host"
# ],
# "identity": [
# "src_user",
# "user",
# "src_user_id",
# "src_user_role",
# "user_id",
# "user_role",
# "vendor_account"
# ]
# },
# "investigation_profiles": [
# "test profile 1",
# "test profile 2",
# "test profile 3"
# ],
# "next_steps": [
# "makestreams",
# "nbtstat",
# "nslookup"
# ],
# "name": "ansible_test_notable",
# "recommended_actions": [
# "email",
# "logevent",
# "makestreams",
# "nbtstat"
# ],
# "security_domain": "threat",
# "severity": "high"
# }
# ],
# Using deleted
# -------------
- name: Example to remove the config
splunk.es.splunk_adaptive_response_notable_events:
config:
- correlation_search_name: Ansible Test
state: deleted
# RUN output:
# -----------
# "after": [],
# "before": [
# {
# "correlation_search_name": "Ansible Test",
# "description": "test notable event",
# "drilldown_earliest_offset": "$info_min_time$",
# "drilldown_latest_offset": "$info_max_time$",
# "drilldown_name": "test_drill_name",
# "drilldown_search": "test_drill",
# "extract_artifacts": {
# "asset": [
# "src",
# "dest",
# "dvc",
# "orig_host"
# ],
# "identity": [
# "src_user",
# "user",
# "src_user_id",
# "src_user_role",
# "user_id",
# "user_role",
# "vendor_account"
# ]
# },
# "investigation_profiles": [
# "test profile 1",
# "test profile 2",
# "test profile 3"
# ],
# "next_steps": [
# "makestreams",
# "nbtstat",
# "nslookup"
# ],
# "name": "ansible_test_notable",
# "recommended_actions": [
# "email",
# "logevent",
# "makestreams",
# "nbtstat"
# ],
# "security_domain": "threat",
# "severity": "high"
# }
# ]
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
  sample: The configuration returned will always be in the same format as the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
  sample: The configuration returned will always be in the same format as the parameters above.
gathered:
description: Facts about the network resource gathered from the remote device as structured data.
returned: when state is I(gathered)
type: dict
sample: >
This output will always be in the same format as the
module argspec.
"""

View File

@@ -0,0 +1,376 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: correlation_search
short_description: Manage Splunk Enterprise Security Correlation Searches
description:
- This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches
version_added: "1.0.0"
deprecated:
alternative: splunk_correlation_searches
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
name:
description:
      - Name of correlation search
required: True
type: str
description:
description:
      - Description of the correlation search; this will populate the description field in the web console.
required: True
type: str
state:
description:
      - Add, remove, enable, or disable a correlation search.
required: True
choices: [ "present", "absent", "enabled", "disabled" ]
type: str
search:
description:
- SPL search string
type: str
required: True
app:
description:
      - Splunk app to associate the correlation search with
type: str
required: False
default: "SplunkEnterpriseSecuritySuite"
ui_dispatch_context:
description:
- Set an app to use for links such as the drill-down search in a notable
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
required: False
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
required: False
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
required: False
default: "now"
cron_schedule:
description:
- Enter a cron-style schedule.
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
required: False
default: "*/5 * * * *"
scheduling:
description:
- Controls the way the scheduler computes the next execution time of a scheduled search.
- >
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
required: False
default: "real-time"
choices:
- "real-time"
- "continuous"
schedule_window:
description:
- Let report run at any time within a window that opens at its scheduled run time,
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
required: False
default: "0"
schedule_priority:
description:
- Raise the scheduling priority of a report. Set to "Higher" to prioritize
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
required: False
default: "Default"
choices:
- "Default"
- "Higher"
- "Highest"
trigger_alert_when:
description:
      - The criterion used to evaluate the alert condition, for example the
        number of events, results, hosts, or sources seen by the search.
type: str
required: False
default: "number of events"
choices:
- "number of events"
- "number of results"
- "number of hosts"
- "number of sources"
trigger_alert_when_condition:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
required: False
default: "greater than"
choices:
- "greater than"
- "less than"
- "equal to"
- "not equal to"
- "drops by"
- "rises by"
trigger_alert_when_value:
description:
- Value to pass to C(trigger_alert_when)
type: str
required: False
default: "10"
throttle_window_duration:
description:
- "How much time to ignore other events that match the field values specified in Fields to group by."
type: str
required: False
throttle_fields_to_group_by:
description:
- "Type the fields to consider for matching events for throttling."
type: str
required: False
suppress_alerts:
description:
- "To suppress alerts from this correlation search or not"
type: bool
required: False
default: False
notes:
- >
The following options are not yet supported:
throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of creating a correlation search with splunk.es.correlation_search
  splunk.es.correlation_search:
    name: "Example Correlation Search From Ansible"
    description: "Example Correlation Search From Ansible, description."
search: 'source="/var/log/snort.log"'
state: "present"
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
state=dict(
choices=["present", "absent", "enabled", "disabled"], required=True
),
search=dict(required=True, type="str"),
app=dict(
type="str", required=False, default="SplunkEnterpriseSecuritySuite"
),
ui_dispatch_context=dict(type="str", required=False),
time_earliest=dict(type="str", required=False, default="-24h"),
time_latest=dict(type="str", required=False, default="now"),
cron_schedule=dict(type="str", required=False, default="*/5 * * * *"),
scheduling=dict(
type="str",
required=False,
default="real-time",
choices=["real-time", "continuous"],
),
schedule_window=dict(type="str", required=False, default="0"),
schedule_priority=dict(
type="str",
required=False,
default="Default",
choices=["Default", "Higher", "Highest"],
),
trigger_alert_when=dict(
type="str",
required=False,
default="number of events",
choices=[
"number of events",
"number of results",
"number of hosts",
"number of sources",
],
),
trigger_alert_when_condition=dict(
type="str",
required=False,
default="greater than",
choices=[
"greater than",
"less than",
"equal to",
"not equal to",
"drops by",
"rises by",
],
),
trigger_alert_when_value=dict(
type="str", required=False, default="10"
),
throttle_window_duration=dict(type="str", required=False),
throttle_fields_to_group_by=dict(type="str", required=False),
suppress_alerts=dict(type="bool", required=False, default=False),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
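    # "present" and "enabled" keep the saved search active; any other state
    # maps to Splunk's disabled flag in the payload assembled below.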
if module.params["state"] in ["present", "enabled"]:
module_disabled_state = False
else:
module_disabled_state = True
splunk_request = SplunkRequest(
module,
override=False,
headers={"Content-Type": "application/x-www-form-urlencoded"},
not_rest_data_keys=["state"],
)
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["name"])
)
)
except HTTPError as e:
        # the correlation search doesn't exist
query_dict = {}
# Have to custom craft the data here because they overload the saved searches
# endpoint in the rest api and we want to hide the nuance from the user
request_post_data = {}
request_post_data["name"] = module.params["name"]
request_post_data["action.correlationsearch.enabled"] = "1"
request_post_data["is_scheduled"] = True
request_post_data["dispatch.rt_backfill"] = True
request_post_data["action.correlationsearch.label"] = module.params["name"]
request_post_data["description"] = module.params["description"]
request_post_data["search"] = module.params["search"]
request_post_data["request.ui_dispatch_app"] = module.params["app"]
if module.params["ui_dispatch_context"]:
request_post_data["request.ui_dispatch_context"] = module.params[
"ui_dispatch_context"
]
request_post_data["dispatch.earliest_time"] = module.params[
"time_earliest"
]
request_post_data["dispatch.latest_time"] = module.params["time_latest"]
request_post_data["cron_schedule"] = module.params["cron_schedule"]
if module.params["scheduling"] == "real-time":
request_post_data["realtime_schedule"] = True
else:
request_post_data["realtime_schedule"] = False
request_post_data["schedule_window"] = module.params["schedule_window"]
request_post_data["schedule_priority"] = module.params[
"schedule_priority"
].lower()
request_post_data["alert_type"] = module.params["trigger_alert_when"]
request_post_data["alert_comparator"] = module.params[
"trigger_alert_when_condition"
]
request_post_data["alert_threshold"] = module.params[
"trigger_alert_when_value"
]
request_post_data["alert.suppress"] = module.params["suppress_alerts"]
request_post_data["disabled"] = module_disabled_state
request_post_data = utils.remove_empties(request_post_data)
if module.params["state"] in ["present", "enabled", "disabled"]:
if query_dict:
needs_change = False
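            # Compare only the keys we are about to POST; a change is flagged
            # when Splunk's current value for any of them differs from ours.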
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(
query_dict["entry"][0]["content"][arg]
) != to_text(request_post_data[arg]):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
# FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled
del request_post_data[
"name"
            ]  # If this is present, Splunk assumes we're trying to create a new one with the same name
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["name"])
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True, msg="{0} updated.", splunk_data=splunk_data
)
else:
# Create it
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
data=urlencode(request_post_data),
)
module.exit_json(
changed=True, msg="{0} created.", splunk_data=splunk_data
)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"services/saved/searches/{0}".format(
quote_plus(module.params["name"])
)
)
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
splunk_data=splunk_data,
)
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,80 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: correlation_search_info
short_description: Query Splunk Enterprise Security Correlation Searches
description:
- This module allows for the query of Splunk Enterprise Security Correlation Searches
version_added: "1.0.0"
options:
name:
description:
      - Name of correlation search
required: false
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example usage of splunk.es.correlation_search_info
splunk.es.correlation_search_info:
name: "Name of correlation search"
  register: correlation_search_info
- name: Debug display information gathered
  debug:
    var: correlation_search_info
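# Omitting name returns every correlation search (an illustrative sketch; the
# register variable is a placeholder):
- name: Gather information on all correlation searches
  splunk.es.correlation_search_info:
  register: all_correlation_searches_info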
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(name=dict(required=False, type="str"))
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
splunk_request = SplunkRequest(
module,
headers={"Content-Type": "application/json"},
)
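    # A specific name fetches that single saved search; omitting it lists
    # every saved search in the Enterprise Security app namespace.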
if module.params["name"]:
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
quote_plus(module.params["name"])
)
)
except HTTPError as e:
            # the correlation search doesn't exist
query_dict = {}
else:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
)
module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,630 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2022 Red Hat
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: splunk_correlation_searches
short_description: Splunk Enterprise Security Correlation searches resource module
description:
- This module allows for creation, deletion, and modification of Splunk
Enterprise Security correlation searches
- Tested against Splunk Enterprise Server v8.2.3 with Splunk Enterprise Security v7.0.1
installed on it.
version_added: "2.1.0"
options:
config:
description:
      - Configure Splunk Enterprise Security correlation searches
type: list
elements: dict
suboptions:
name:
description:
- Name of correlation search
type: str
required: True
disabled:
description:
- Disable correlation search
type: bool
default: False
description:
description:
          - Description of the correlation search; this will populate the description field in the web console.
type: str
search:
description:
- SPL search string
type: str
app:
description:
          - Splunk app to associate the correlation search with
type: str
default: "SplunkEnterpriseSecuritySuite"
annotations:
description:
- Add context from industry standard cyber security mappings in Splunk Enterprise Security
or custom annotations
type: dict
suboptions:
cis20:
description:
- Specify CIS20 annotations
type: list
elements: str
kill_chain_phases:
description:
              - Specify Kill Chain annotations
type: list
elements: str
mitre_attack:
description:
              - Specify MITRE ATT&CK annotations
type: list
elements: str
nist:
description:
- Specify NIST annotations
type: list
elements: str
custom:
description:
- Specify custom framework and custom annotations
type: list
elements: dict
suboptions:
framework:
description:
- Specify annotation framework
type: str
custom_annotations:
description:
- Specify annotations associated with custom framework
type: list
elements: str
ui_dispatch_context:
description:
- Set an app to use for links such as the drill-down search in a notable
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
default: "now"
cron_schedule:
description:
- Enter a cron-style schedule.
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
default: "*/5 * * * *"
scheduling:
description:
- Controls the way the scheduler computes the next execution time of a scheduled search.
- >
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
default: "realtime"
choices:
- "realtime"
- "continuous"
schedule_window:
description:
- Let report run at any time within a window that opens at its scheduled run time,
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
default: "0"
schedule_priority:
description:
- Raise the scheduling priority of a report. Set to "Higher" to prioritize
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
default: "default"
choices:
- "default"
- "higher"
- "highest"
trigger_alert:
description:
- Notable response actions and risk response actions are always triggered for each result.
Choose whether the trigger is activated once or for each result.
type: str
default: "once"
choices:
- "once"
- "for each result"
trigger_alert_when:
description:
          - The criterion used to evaluate the alert condition, for example the
            number of events, results, hosts, or sources seen by the search.
type: str
default: "number of events"
choices:
- "number of events"
- "number of results"
- "number of hosts"
- "number of sources"
trigger_alert_when_condition:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
default: "greater than"
choices:
- "greater than"
- "less than"
- "equal to"
- "not equal to"
- "drops by"
- "rises by"
trigger_alert_when_value:
description:
- Value to pass to C(trigger_alert_when)
type: str
default: "10"
throttle_window_duration:
description:
- How much time to ignore other events that match the field values specified in Fields to group by.
type: str
throttle_fields_to_group_by:
description:
- Type the fields to consider for matching events for throttling.
type: list
elements: str
suppress_alerts:
description:
          - Whether to suppress alerts from this correlation search.
type: bool
default: False
running_config:
description:
- The module, by default, will connect to the remote device and retrieve the current
running-config to use as a base for comparing against the contents of source.
There are times when it is not desirable to have the task get the current running-config
for every task in a playbook. The I(running_config) argument allows the implementer
        to pass in the configuration to use as the base config for comparison. The
        value of this option should be the output received from the device by
        executing the command.
type: str
state:
description:
- The state the configuration should be left in
type: str
choices:
- merged
- replaced
- deleted
- gathered
default: merged
author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
"""
EXAMPLES = """
# Using gathered
# --------------
- name: Gather correlation searches config
splunk.es.splunk_correlation_searches:
config:
- name: Ansible Test
- name: Ansible Test 2
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "annotations": {
# "cis20": [
# "test1"
# ],
# "custom": [
# {
# "custom_annotations": [
# "test5"
# ],
# "framework": "test_framework"
# }
# ],
# "kill_chain_phases": [
# "test3"
# ],
# "mitre_attack": [
# "test2"
# ],
# "nist": [
# "test4"
# ]
# },
# "app": "DA-ESS-EndpointProtection",
# "cron_schedule": "*/5 * * * *",
# "description": "test description",
# "disabled": false,
# "name": "Ansible Test",
# "schedule_priority": "default",
# "schedule_window": "0",
# "scheduling": "realtime",
# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
# 'n.src\" as \"src\" | where \"count\">=6',
# "suppress_alerts": false,
# "throttle_fields_to_group_by": [
# "test_field1"
# ],
# "throttle_window_duration": "5s",
# "time_earliest": "-24h",
# "time_latest": "now",
# "trigger_alert": "once",
# "trigger_alert_when": "number of events",
# "trigger_alert_when_condition": "greater than",
# "trigger_alert_when_value": "10",
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# }
# ]
# Using merged
# ------------
- name: Merge and create new correlation searches configuration
splunk.es.splunk_correlation_searches:
config:
- name: Ansible Test
disabled: false
description: test description
app: DA-ESS-EndpointProtection
annotations:
cis20:
- test1
mitre_attack:
- test2
kill_chain_phases:
- test3
nist:
- test4
custom:
- framework: test_framework
custom_annotations:
- test5
ui_dispatch_context: SplunkEnterpriseSecuritySuite
time_earliest: -24h
time_latest: now
cron_schedule: "*/5 * * * *"
scheduling: realtime
schedule_window: "0"
schedule_priority: default
trigger_alert: once
trigger_alert_when: number of events
trigger_alert_when_condition: greater than
trigger_alert_when_value: "10"
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
suppress_alerts: False
search: >
'| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
'n.src\" as \"src\" | where \"count\">=6'
state: merged
# RUN output:
# -----------
# "after": [
# {
# "annotations": {
# "cis20": [
# "test1"
# ],
# "custom": [
# {
# "custom_annotations": [
# "test5"
# ],
# "framework": "test_framework"
# }
# ],
# "kill_chain_phases": [
# "test3"
# ],
# "mitre_attack": [
# "test2"
# ],
# "nist": [
# "test4"
# ]
# },
# "app": "DA-ESS-EndpointProtection",
# "cron_schedule": "*/5 * * * *",
# "description": "test description",
# "disabled": false,
# "name": "Ansible Test",
# "schedule_priority": "default",
# "schedule_window": "0",
# "scheduling": "realtime",
# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
# 'n.src\" as \"src\" | where \"count\">=6',
# "suppress_alerts": false,
# "throttle_fields_to_group_by": [
# "test_field1"
# ],
# "throttle_window_duration": "5s",
# "time_earliest": "-24h",
# "time_latest": "now",
# "trigger_alert": "once",
# "trigger_alert_when": "number of events",
# "trigger_alert_when_condition": "greater than",
# "trigger_alert_when_value": "10",
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# },
# ],
# "before": [],
# Using replaced
# --------------
- name: Replace existing correlation searches configuration
splunk.es.splunk_correlation_searches:
state: replaced
config:
- name: Ansible Test
disabled: false
description: test description
app: SplunkEnterpriseSecuritySuite
annotations:
cis20:
- test1
- test2
mitre_attack:
- test3
- test4
kill_chain_phases:
- test5
- test6
nist:
- test7
- test8
custom:
- framework: test_framework2
custom_annotations:
- test9
- test10
ui_dispatch_context: SplunkEnterpriseSecuritySuite
time_earliest: -24h
time_latest: now
cron_schedule: "*/5 * * * *"
scheduling: continuous
schedule_window: auto
schedule_priority: default
trigger_alert: once
trigger_alert_when: number of events
trigger_alert_when_condition: greater than
trigger_alert_when_value: 10
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- test_field2
suppress_alerts: True
search: >
'| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
'n.src\" as \"src\" | where \"count\">=6'
# RUN output:
# -----------
# "after": [
# {
# "annotations": {
# "cis20": [
# "test1",
# "test2"
# ],
# "custom": [
# {
# "custom_annotations": [
# "test9",
# "test10"
# ],
# "framework": "test_framework2"
# }
# ],
# "kill_chain_phases": [
# "test5",
# "test6"
# ],
# "mitre_attack": [
# "test3",
# "test4"
# ],
# "nist": [
# "test7",
# "test8"
# ]
# },
# "app": "SplunkEnterpriseSecuritySuite",
# "cron_schedule": "*/5 * * * *",
# "description": "test description",
# "disabled": false,
# "name": "Ansible Test",
# "schedule_priority": "default",
# "schedule_window": "auto",
# "scheduling": "continuous",
# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
# 'n.src\" as \"src\" | where \"count\">=6',
# "suppress_alerts": true,
# "throttle_fields_to_group_by": [
# "test_field1",
# "test_field2"
# ],
# "throttle_window_duration": "5s",
# "time_earliest": "-24h",
# "time_latest": "now",
# "trigger_alert": "once",
# "trigger_alert_when": "number of events",
# "trigger_alert_when_condition": "greater than",
# "trigger_alert_when_value": "10",
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# }
# ],
# "before": [
# {
# "annotations": {
# "cis20": [
# "test1"
# ],
# "custom": [
# {
# "custom_annotations": [
# "test5"
# ],
# "framework": "test_framework"
# }
# ],
# "kill_chain_phases": [
# "test3"
# ],
# "mitre_attack": [
# "test2"
# ],
# "nist": [
# "test4"
# ]
# },
# "app": "DA-ESS-EndpointProtection",
# "cron_schedule": "*/5 * * * *",
# "description": "test description",
# "disabled": false,
# "name": "Ansible Test",
# "schedule_priority": "default",
# "schedule_window": "0",
# "scheduling": "realtime",
# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
# 'n.src\" as \"src\" | where \"count\">=6',
# "suppress_alerts": false,
# "throttle_fields_to_group_by": [
# "test_field1"
# ],
# "throttle_window_duration": "5s",
# "time_earliest": "-24h",
# "time_latest": "now",
# "trigger_alert": "once",
# "trigger_alert_when": "number of events",
# "trigger_alert_when_condition": "greater than",
# "trigger_alert_when_value": "10",
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# }
# ]
# Using deleted
# -------------
- name: Example to delete the correlation search
splunk.es.splunk_correlation_searches:
config:
- name: Ansible Test
state: deleted
# RUN output:
# -----------
# "after": [],
# "before": [
# {
# "annotations": {
# "cis20": [
# "test1"
# ],
# "custom": [
# {
# "custom_annotations": [
# "test5"
# ],
# "framework": "test_framework"
# }
# ],
# "kill_chain_phases": [
# "test3"
# ],
# "mitre_attack": [
# "test2"
# ],
# "nist": [
# "test4"
# ]
# },
# "app": "DA-ESS-EndpointProtection",
# "cron_schedule": "*/5 * * * *",
# "description": "test description",
# "disabled": false,
# "name": "Ansible Test",
# "schedule_priority": "default",
# "schedule_window": "0",
# "scheduling": "realtime",
# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
# 'n.src\" as \"src\" | where \"count\">=6',
# "suppress_alerts": false,
# "throttle_fields_to_group_by": [
# "test_field1"
# ],
# "throttle_window_duration": "5s",
# "time_earliest": "-24h",
# "time_latest": "now",
# "trigger_alert": "once",
# "trigger_alert_when": "number of events",
# "trigger_alert_when_condition": "greater than",
# "trigger_alert_when_value": "10",
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# }
# ]
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
  sample: The configuration returned will always be in the same format as the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
  sample: The configuration returned will always be in the same format as the parameters above.
gathered:
description: Facts about the network resource gathered from the remote device as structured data.
returned: when state is I(gathered)
type: dict
sample: >
This output will always be in the same format as the
module argspec.
"""

View File

@@ -0,0 +1,264 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: data_input_monitor
short_description: Manage Splunk Data Inputs of type Monitor
description:
- This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk.
version_added: "1.0.0"
deprecated:
alternative: splunk_data_inputs_monitor
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
name:
description:
- The file or directory path to monitor on the system.
required: True
type: str
state:
description:
- Add or remove a data source.
required: True
choices:
- "present"
- "absent"
type: str
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
required: False
type: str
check_index:
description:
- If set to C(True), the index value is checked to ensure that it is the name of a valid index.
required: False
type: bool
default: False
check_path:
description:
- If set to C(True), the name value is checked to ensure that it exists.
required: False
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
required: False
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
required: False
default: False
type: bool
followTail:
description:
      - If set to C(True), files that are seen for the first time are read from the end.
required: False
type: bool
default: False
host:
description:
- The value to populate in the host field for events from this data input.
required: False
type: str
host_regex:
description:
- Specify a regular expression for a file path. If the path for a file
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
required: False
type: str
host_segment:
description:
      - Use the specified slash-separated segment of the filepath as the host field value.
required: False
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
falls outside of this rolling time window, the file is no longer being monitored.
required: False
type: str
index:
description:
      - The index in which events from this input should be stored. Defaults to C(default).
required: False
type: str
recursive:
description:
- Setting this to False prevents monitoring of any subdirectories encountered within this data input.
required: False
type: bool
default: False
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
required: False
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
required: False
type: str
time_before_close:
description:
- When Splunk software reaches the end of a file that is being read, the
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
required: False
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
required: False
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
EXAMPLES = """
- name: Example adding data input monitor with splunk.es.data_input_monitor
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
recursive: True
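# A further illustrative sketch (the path, patterns, and sourcetype are
# placeholders); whitelist and blacklist take regular expressions on file paths:
- name: Add a monitor that only indexes matching file paths
  splunk.es.data_input_monitor:
    name: "/var/log/example"
    state: "present"
    whitelist: '\.log$'
    blacklist: '\.gz$'
    sourcetype: "example_sourcetype"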
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
name=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
blacklist=dict(required=False, type="str", default=None),
check_index=dict(required=False, type="bool", default=False),
check_path=dict(required=False, type="bool", default=None),
crc_salt=dict(required=False, type="str", default=None),
disabled=dict(required=False, type="bool", default=False),
followTail=dict(required=False, type="bool", default=False),
host=dict(required=False, type="str", default=None),
host_segment=dict(required=False, type="int", default=None),
host_regex=dict(required=False, type="str", default=None),
ignore_older_than=dict(required=False, type="str", default=None),
index=dict(required=False, type="str", default=None),
recursive=dict(required=False, type="bool", default=False),
rename_source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
time_before_close=dict(required=False, type="int", default=None),
whitelist=dict(required=False, type="str", default=None),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
# map of keys for the splunk REST API that aren't pythonic so we have to
# handle the substitutes
keymap = {
"check_index": "check-index",
"check_path": "check-path",
"crc_salt": "crc-salt",
"ignore_older_than": "ignore-older-than",
"rename_source": "rename-source",
"time_before_close": "time-before-close",
}
splunk_request = SplunkRequest(
module,
headers={"Content-Type": "application/x-www-form-urlencoded"},
keymap=keymap,
not_rest_data_keys=["state"],
)
# This is where the splunk_* args are processed
request_data = splunk_request.get_data()
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
quote_plus(module.params["name"])
)
)
query_dict = utils.remove_empties(query_dict)
if module.params["state"] == "present":
if query_dict:
needs_change = False
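            # Both sides are normalized with to_text before comparing so that
            # type differences alone do not register as a change.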
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(
query_dict["entry"][0]["content"][arg]
) != to_text(request_data[arg]):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
quote_plus(module.params["name"])
)
)
module.exit_json(
changed=True, msg="{0} updated.", splunk_data=splunk_data
)
else:
# Create it
_data = splunk_request.get_data()
_data["name"] = module.params["name"]
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor",
data=_data,
)
module.exit_json(
changed=True, msg="{0} created.", splunk_data=splunk_data
)
if module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
quote_plus(module.params["name"])
)
)
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
splunk_data=splunk_data,
)
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,276 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# https://github.com/ansible/ansible/issues/65816
# https://github.com/PyCQA/pylint/issues/214
# (c) 2018, Adam Miller (admiller@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: data_input_network
short_description: Manage Splunk Data Inputs of type TCP or UDP
description:
- This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk.
version_added: "1.0.0"
deprecated:
alternative: splunk_data_inputs_network
why: Newer and updated modules released with more functionality.
removed_at_date: '2024-09-01'
options:
protocol:
description:
- Choose between tcp or udp
required: True
choices:
- 'tcp'
- 'udp'
type: str
connection_host:
description:
- Set the host for the remote server that is sending data.
- C(ip) sets the host to the IP address of the remote server sending data.
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
default: "ip"
required: False
type: str
choices:
- "ip"
- "dns"
- "none"
state:
description:
- Enable, disable, create, or destroy
choices:
- "present"
- "absent"
- "enabled"
- "disable"
required: False
default: "present"
type: str
datatype:
description: >
Forwarders can transmit three types of data: raw, unparsed, or parsed.
C(cooked) data refers to parsed and unparsed formats.
choices:
- "cooked"
- "raw"
default: "raw"
required: False
type: str
host:
description:
- Host from which the indexer gets data.
required: False
type: str
index:
description:
      - Default index to store generated events.
type: str
name:
description:
- The input port which receives raw data.
required: True
type: str
queue:
description:
- Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
- Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
the online documentation at "Monitor files and directories with inputs.conf"
- Set queue to indexQueue to send your data directly into the index.
choices:
- "parsingQueue"
- "indexQueue"
type: str
required: False
default: "parsingQueue"
rawTcpDoneTimeout:
description:
- Specifies in seconds the timeout value for adding a Done-key.
      - If a connection over the port specified by name remains idle after receiving data for the
        specified number of seconds, it adds a Done-key, implying the last event has been completely received.
default: 10
type: int
required: False
restrictToHost:
description:
- Allows for restricting this input to only accept data from the host specified here.
required: False
type: str
ssl:
description:
      - Enable or disable SSL for the data stream
required: False
type: bool
source:
description:
- Sets the source key/field for events from this input. Defaults to the input file path.
- >
Sets the source key initial value. The key is used during parsing/indexing, in particular to set
the source field during indexing. It is also the source field used at search time. As a convenience,
the chosen string is prepended with 'source::'.
- >
Note: Overriding the source key is generally not recommended. Typically, the input layer provides a
more accurate string to aid in problem analysis and investigation, accurately recording the file from
which the data was retrieved. Consider use of source types, tagging, and search wildcards before
overriding this value.
type: str
sourcetype:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
EXAMPLES = """
- name: Example adding data input network with splunk.es.data_input_network
splunk.es.data_input_network:
name: "8099"
protocol: "tcp"
state: "present"
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
)
def main():
argspec = dict(
state=dict(
required=False,
choices=["present", "absent", "enabled", "disable"],
default="present",
type="str",
),
connection_host=dict(
required=False,
choices=["ip", "dns", "none"],
default="ip",
type="str",
),
host=dict(required=False, type="str", default=None),
index=dict(required=False, type="str", default=None),
name=dict(required=True, type="str"),
protocol=dict(required=True, type="str", choices=["tcp", "udp"]),
queue=dict(
required=False,
type="str",
choices=["parsingQueue", "indexQueue"],
default="parsingQueue",
),
rawTcpDoneTimeout=dict(required=False, type="int", default=10),
restrictToHost=dict(required=False, type="str", default=None),
ssl=dict(required=False, type="bool", default=None),
source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
datatype=dict(
required=False, choices=["cooked", "raw"], default="raw"
),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
splunk_request = SplunkRequest(
module,
headers={"Content-Type": "application/x-www-form-urlencoded"},
not_rest_data_keys=["state", "datatype", "protocol"],
)
# This is where the splunk_* args are processed
request_data = splunk_request.get_data()
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
)
)
if module.params["state"] in ["present", "enabled", "disabled"]:
_data = splunk_request.get_data()
if module.params["state"] in ["present", "enabled"]:
_data["disabled"] = False
else:
_data["disabled"] = True
if query_dict:
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(
query_dict["entry"][0]["content"][arg]
) != to_text(request_data[arg]):
needs_change = True
if not needs_change:
module.exit_json(
changed=False, msg="Nothing to do.", splunk_data=query_dict
)
if module.check_mode and needs_change:
module.exit_json(
changed=True,
msg="A change would have been made if not in check mode.",
splunk_data=query_dict,
)
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
),
data=_data,
)
if module.params["state"] in ["present", "enabled"]:
module.exit_json(
changed=True, msg="{0} updated.", splunk_data=splunk_data
)
else:
module.exit_json(
changed=True, msg="{0} disabled.", splunk_data=splunk_data
)
else:
# Create it
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/{0}/{1}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
),
data=_data,
)
module.exit_json(
changed=True, msg="{0} created.", splunk_data=splunk_data
)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
)
)
module.exit_json(
changed=True,
msg="Deleted {0}.".format(module.params["name"]),
splunk_data=splunk_data,
)
module.exit_json(changed=False, msg="Nothing to do.", splunk_data={})
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,300 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2022 Red Hat
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: splunk_data_inputs_monitor
short_description: Splunk Data Inputs of type Monitor resource module
description:
  - Module to add, modify, or delete File and Directory Monitor Data Inputs in Splunk.
- Tested against Splunk Enterprise Server 8.2.3
version_added: "2.1.0"
options:
config:
description:
- Configure file and directory monitoring on the system
type: list
elements: dict
suboptions:
name:
description:
- The file or directory path to monitor on the system.
required: True
type: str
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
type: str
check_index:
description:
- If set to C(True), the index value is checked to ensure that it is the name of a valid index.
          - This parameter is not returned by Splunk while obtaining object information.
            It is therefore left out while performing idempotency checks.
type: bool
check_path:
description:
- If set to C(True), the name value is checked to ensure that it exists.
          - This parameter is not returned by Splunk while obtaining object information.
            It is therefore left out while performing idempotency checks.
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
type: bool
default: False
follow_tail:
description:
          - If set to C(True), files that are seen for the first time are read from the end.
type: bool
host:
description:
- The value to populate in the host field for events from this data input.
type: str
default: "$decideOnStartup"
host_regex:
description:
- Specify a regular expression for a file path. If the path for a file
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
type: str
host_segment:
description:
          - Use the specified slash-separated segment of the filepath as the host field value.
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
falls outside of this rolling time window, the file is no longer being monitored.
          - This parameter is not returned by Splunk while obtaining object information.
            It is therefore left out while performing idempotency checks.
type: str
index:
description:
          - The index in which events from this input should be stored. Defaults to C(default).
type: str
default: "default"
recursive:
description:
- Setting this to False prevents monitoring of any subdirectories encountered within this data input.
type: bool
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
          - This parameter is not returned by Splunk while obtaining object information.
            It is therefore left out while performing idempotency checks.
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
type: str
time_before_close:
description:
- When Splunk software reaches the end of a file that is being read, the
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
          - This parameter is not returned by Splunk while obtaining object information.
            It is therefore left out while performing idempotency checks.
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
type: str
running_config:
description:
- The module, by default, will connect to the remote device and retrieve the current
running-config to use as a base for comparing against the contents of source.
There are times when it is not desirable to have the task get the current running-config
for every task in a playbook. The I(running_config) argument allows the implementer
to pass in the configuration to use as the base config for comparison. This
value of this option should be the output received from device by executing
command.
type: str
state:
description:
- The state the configuration should be left in
type: str
choices:
- merged
- replaced
- deleted
- gathered
default: merged
author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
"""
EXAMPLES = """
# Using gathered
# --------------
- name: Gather config for specified Data inputs monitors
splunk.es.splunk_data_inputs_monitor:
config:
- name: "/var/log"
- name: "/var"
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "blacklist": "//var/log/[a-z0-9]/gm",
# "crc_salt": "<SOURCE>",
# "disabled": false,
# "host": "$decideOnStartup",
# "host_regex": "/(test_host)/gm",
# "host_segment": 3,
# "index": "default",
# "name": "/var/log",
# "recursive": true,
# "sourcetype": "test_source",
# "whitelist": "//var/log/[0-9]/gm"
# }
# ]
#
# Using merged
# ------------
- name: Update Data inputs monitors config
splunk.es.splunk_data_inputs_monitor:
config:
- name: "/var/log"
blacklist: "//var/log/[a-z]/gm"
check_index: True
check_path: True
crc_salt: <SOURCE>
rename_source: "test"
whitelist: "//var/log/[0-9]/gm"
state: merged
# RUN output:
# -----------
# "after": [
# {
# "blacklist": "//var/log/[a-z]/gm",
# "crc_salt": "<SOURCE>",
# "disabled": false,
# "host": "$decideOnStartup",
# "host_regex": "/(test_host)/gm",
# "host_segment": 3,
# "index": "default",
# "name": "/var/log",
# "recursive": true,
# "sourcetype": "test_source",
# "whitelist": "//var/log/[0-9]/gm"
# }
# ],
# "before": [
# {
# "blacklist": "//var/log/[a-z0-9]/gm",
# "crc_salt": "<SOURCE>",
# "disabled": false,
# "host": "$decideOnStartup",
# "host_regex": "/(test_host)/gm",
# "host_segment": 3,
# "index": "default",
# "name": "/var/log",
# "recursive": true,
# "sourcetype": "test_source",
# "whitelist": "//var/log/[0-9]/gm"
# }
# ],
# Using replaced
# --------------
- name: To Replace Data inputs monitors config
splunk.es.splunk_data_inputs_monitor:
config:
- name: "/var/log"
blacklist: "//var/log/[a-z0-9]/gm"
crc_salt: <SOURCE>
index: default
state: replaced
# RUN output:
# -----------
# "after": [
# {
# "blacklist": "//var/log/[a-z0-9]/gm",
# "crc_salt": "<SOURCE>",
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "/var/log"
# }
# ],
# "before": [
# {
# "blacklist": "//var/log/[a-z0-9]/gm",
# "crc_salt": "<SOURCE>",
# "disabled": false,
# "host": "$decideOnStartup",
# "host_regex": "/(test_host)/gm",
# "host_segment": 3,
# "index": "default",
# "name": "/var/log",
# "recursive": true,
# "sourcetype": "test_source",
# "whitelist": "//var/log/[0-9]/gm"
# }
# ],
# Using deleted
# -------------
- name: To Delete Data input monitor config
splunk.es.splunk_data_inputs_monitor:
config:
- name: "/var/log"
state: deleted
# RUN output:
# -----------
#
# "after": [],
# "before": [
# {
# "blacklist": "//var/log/[a-z0-9]/gm",
# "crc_salt": "<SOURCE>",
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "/var/log"
# }
# ],
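#
# A hedged variation (illustrative): config accepts a list, so several
# monitors can be removed in a single task.
- name: To Delete multiple Data input monitor configs
  splunk.es.splunk_data_inputs_monitor:
    config:
      - name: "/var/log"
      - name: "/var"
    state: deleted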
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
sample: The configuration returned will always be in the same format as the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
sample: The configuration returned will always be in the same format as the parameters above.
"""

View File

@@ -0,0 +1,603 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2022 Red Hat
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
---
module: splunk_data_inputs_network
short_description: Manage Splunk Data Inputs of type TCP or UDP resource module
description:
- Module that allows adding, updating, or deleting TCP and UDP Data Inputs in Splunk.
version_added: "2.1.0"
options:
config:
description:
- Manage and preview protocol input data.
type: list
elements: dict
suboptions:
name:
description:
- The input port which receives raw data.
required: True
type: str
protocol:
description:
- Choose whether to manage TCP or UDP inputs
required: True
choices:
- 'tcp'
- 'udp'
type: str
connection_host:
description:
- Set the host for the remote server that is sending data.
- C(ip) sets the host to the IP address of the remote server sending data.
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
type: str
choices:
- "ip"
- "dns"
- "none"
datatype:
description:
- C(cooked) lets one access cooked TCP input information and create new containers for managing cooked data.
- C(raw) lets one manage raw TCP inputs from forwarders.
- C(splunktcptoken) lets one manage receiver access using tokens.
- C(ssl) provides access to the SSL configuration of a Splunk server.
  This option does not support states I(deleted) and I(replaced).
choices:
- "cooked"
- "raw"
- "splunktcptoken"
- "ssl"
required: False
type: str
disabled:
description:
- Indicates whether the input is disabled.
type: bool
host:
description:
- Host from which the indexer gets data.
type: str
index:
description:
- The default index in which to store generated events.
type: str
no_appending_timestamp:
description:
- If set to true, prevents Splunk software from prepending a timestamp and hostname to incoming events.
- Only for UDP data input configuration.
type: bool
no_priority_stripping:
description:
- If set to true, Splunk software does not remove the priority field from incoming syslog events.
- Only for UDP data input configuration.
type: bool
queue:
description:
- Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
- Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
the online documentation at "Monitor files and directories with inputs.conf"
- Set queue to indexQueue to send your data directly into the index.
- Only applicable for "/tcp/raw" and "/udp" APIs
choices:
- "parsingQueue"
- "indexQueue"
type: str
raw_tcp_done_timeout:
description:
- Specifies in seconds the timeout value for adding a Done-key.
- If a connection over the port specified by name remains idle for the specified number of
  seconds after receiving data, a Done-key is added. This implies that the last event has
  been completely received.
- Only for TCP raw input configuration.
type: int
restrict_to_host:
description:
- Allows for restricting this input to only accept data from the host specified here.
type: str
ssl:
description:
- Enable or disable SSL for the data stream.
type: bool
source:
description:
- Sets the source key/field for events from this input. Defaults to the input file path.
- Sets the source key initial value. The key is used during parsing/indexing, in particular to set
the source field during indexing. It is also the source field used at search time. As a convenience,
the chosen string is prepended with 'source::'.
- Note that overriding the source key is generally not recommended. Typically, the input layer provides a
more accurate string to aid in problem analysis and investigation, accurately recording the file from
which the data was retrieved. Consider use of source types, tagging, and search wildcards before
overriding this value.
type: str
sourcetype:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
type: str
token:
description:
- Token value to use for SplunkTcpToken. If unspecified, a token is generated automatically.
type: str
password:
description:
- Server certificate password, if any.
- Only for TCP SSL configuration.
type: str
require_client_cert:
description:
- Determines whether a client must authenticate.
- Only for TCP SSL configuration.
type: str
root_ca:
description:
- Certificate authority list (root file).
- Only for TCP SSL configuration.
type: str
server_cert:
description:
- Full path to the server certificate.
- Only for TCP SSL configuration.
type: str
cipher_suite:
description:
- Specifies the list of acceptable ciphers to use in SSL.
- Only returned for the TCP SSL configuration present on the device.
type: str
running_config:
description:
- By default, the module connects to the remote device and retrieves the current
  running-config to use as a base for comparison against the contents of source.
  There are times when it is not desirable to have the task get the current
  running-config for every task in a playbook. The I(running_config) argument
  allows the implementer to pass in the configuration to use as the base config
  for comparison. The value of this option should be the output received from
  the device.
type: str
state:
description:
- The state the configuration should be left in
type: str
choices:
- merged
- replaced
- deleted
- gathered
default: merged
author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
"""
EXAMPLES = """
# Using gathered
# --------------
- name: Gathering information about TCP Cooked Inputs
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: cooked
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "connection_host": "ip",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "8101"
# },
# {
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "9997"
# },
# {
# "connection_host": "ip",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8101",
# "restrict_to_host": "default"
# }
# ]
- name: Gathering information about TCP Cooked Inputs by Name
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: cooked
name: 9997
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "datatype": "cooked",
# "disabled": false,
# "host": "$decideOnStartup",
# "name": "9997",
# "protocol": "tcp"
# }
# ]
- name: Gathering information about TCP Raw Inputs
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: raw
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "connection_host": "ip",
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "8099",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 10
# },
# {
# "connection_host": "ip",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8100",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 10,
# "restrict_to_host": "default",
# "source": "test_source",
# "sourcetype": "test_source_type"
# }
# ]
- name: Gathering information about TCP Raw inputs by Name
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: raw
name: 8099
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "connection_host": "ip",
# "datatype": "raw",
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "8099",
# "protocol": "tcp",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 10
# }
# ]
- name: Gathering information about TCP SSL configuration
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: ssl
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "cipher_suite": <cipher-suites>,
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "test_host"
# }
# ]
- name: Gathering information about TCP SplunkTcpTokens
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: splunktcptoken
state: gathered
# RUN output:
# -----------
# "gathered": [
# {
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "splunktcptoken://test_token1",
# "token": <token1>
# },
# {
# "disabled": false,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "splunktcptoken://test_token2",
# "token": <token2>
# }
# ]
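# A hedged sketch (illustrative; the port number is an assumption): UDP
# inputs are gathered the same way, except that they take no datatype.
- name: Gathering information about UDP Inputs by Name
  splunk.es.splunk_data_inputs_network:
    config:
      - protocol: udp
        name: 514
    state: gathered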
# Using merged
# ------------
- name: To add the TCP raw config
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: raw
name: 8100
connection_host: ip
disabled: True
raw_tcp_done_timeout: 9
restrict_to_host: default
queue: parsingQueue
source: test_source
sourcetype: test_source_type
state: merged
# RUN output:
# -----------
# "after": [
# {
# "connection_host": "ip",
# "datatype": "raw",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8100",
# "protocol": "tcp",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 9,
# "restrict_to_host": "default",
# "source": "test_source",
# "sourcetype": "test_source_type"
# }
# ],
# "before": [
# {
# "connection_host": "ip",
# "datatype": "raw",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8100",
# "protocol": "tcp",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 10,
# "restrict_to_host": "default",
# "source": "test_source",
# "sourcetype": "test_source_type"
# }
# ]
- name: To add the TCP cooked config
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: cooked
name: 8101
connection_host: ip
disabled: False
restrict_to_host: default
state: merged
# RUN output:
# -----------
# "after": [
# {
# "connection_host": "ip",
# "datatype": "cooked",
# "disabled": false,
# "host": "$decideOnStartup",
# "name": "default:8101",
# "protocol": "tcp",
# "restrict_to_host": "default"
# }
# ],
# "before": [
# {
# "connection_host": "ip",
# "datatype": "cooked",
# "disabled": true,
# "host": "$decideOnStartup",
# "name": "default:8101",
# "protocol": "tcp",
# "restrict_to_host": "default"
# }
# ],
- name: To add the Splunk TCP token
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: splunktcptoken
name: test_token
state: merged
# RUN output:
# -----------
# "after": [
# {
# "datatype": "splunktcptoken",
# "name": "splunktcptoken://test_token",
# "protocol": "tcp",
# "token": <token>
# }
# ],
# "before": [],
- name: To add the Splunk SSL
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: ssl
name: test_host
root_ca: {root CA directory}
server_cert: {server certificate directory}
state: merged
# RUN output:
# -----------
# "after": [
# {
# "cipher_suite": <cipher suite>,
# "datatype": "ssl",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "test_host",
# "protocol": "tcp"
# }
# ],
# "before": []
# Using deleted
# -------------
- name: To Delete TCP Raw
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
datatype: raw
name: default:8100
state: deleted
# RUN output:
# -----------
# "after": [],
# "before": [
# {
# "connection_host": "ip",
# "datatype": "raw",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8100",
# "protocol": "tcp",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 9,
# "restrict_to_host": "default",
# "source": "test_source",
# "sourcetype": "test_source_type"
# }
# ]
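# A hedged variation (illustrative): a SplunkTcpToken can be removed the same
# way by addressing it through its full name.
- name: To Delete a Splunk TCP token
  splunk.es.splunk_data_inputs_network:
    config:
      - protocol: tcp
        datatype: splunktcptoken
        name: splunktcptoken://test_token
    state: deleted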
# Using replaced
# --------------
- name: Replace existing data inputs networks configuration
register: result
splunk.es.splunk_data_inputs_network:
state: replaced
config:
- protocol: tcp
datatype: raw
name: 8100
connection_host: ip
disabled: True
host: "$decideOnStartup"
index: default
queue: parsingQueue
raw_tcp_done_timeout: 10
restrict_to_host: default
source: test_source
sourcetype: test_source_type
# RUN output:
# -----------
# "after": [
# {
# "connection_host": "ip",
# "datatype": "raw",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8100",
# "protocol": "tcp",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 9,
# "restrict_to_host": "default",
# "source": "test_source",
# "sourcetype": "test_source_type"
# }
# ],
# "before": [
# {
# "connection_host": "ip",
# "datatype": "raw",
# "disabled": true,
# "host": "$decideOnStartup",
# "index": "default",
# "name": "default:8100",
# "protocol": "tcp",
# "queue": "parsingQueue",
# "raw_tcp_done_timeout": 10,
# "restrict_to_host": "default",
# "source": "test_source",
# "sourcetype": "test_source_type"
# }
# ],
"""
RETURN = """
before:
description: The configuration prior to the module execution.
returned: when state is I(merged), I(replaced), I(deleted)
type: list
sample: >
This output will always be in the same format as the
module argspec.
after:
description: The resulting configuration after module execution.
returned: when changed
type: list
sample: >
This output will always be in the same format as the
module argspec.
gathered:
description: Facts about the network resource gathered from the remote device as structured data.
returned: when state is I(gathered)
type: dict
sample: >
This output will always be in the same format as the
module argspec.
"""