Init: mediaserver

This commit is contained in:
2023-02-08 12:13:28 +01:00
parent 848bc9739c
commit f7c23d4ba9
31914 changed files with 6175775 additions and 0 deletions

View File

@@ -0,0 +1,529 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for adaptive_response_notable_events
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import json
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
set_defaults,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_adaptive_response_notable_events import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
    """Action module backing splunk.es adaptive_response_notable_events.

    Translates module arguments into Splunk ES REST calls against the
    saved-searches endpoint, where notable-event response actions live.
    """

    def __init__(self, *args, **kwargs):
        """Set up the REST endpoint, module name and key mappings."""
        super(ActionModule, self).__init__(*args, **kwargs)
        # Populated by run(); holds the result dict returned to Ansible.
        self._result = None
        # Notable-event settings are stored on the correlation (saved)
        # search object inside the SplunkEnterpriseSecuritySuite app.
        self.api_object = (
            "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
        )
        self.module_name = "adaptive_response_notable_events"
        # Splunk REST field name -> module argument name; used in both
        # directions by map_params_to_obj / map_obj_to_params.
        self.key_transform = {
            "action.notable.param.default_owner": "default_owner",
            "action.notable.param.default_status": "default_status",
            "action.notable.param.drilldown_name": "drilldown_name",
            "action.notable.param.drilldown_search": "drilldown_search",
            "action.notable.param.drilldown_earliest_offset": "drilldown_earliest_offset",
            "action.notable.param.drilldown_latest_offset": "drilldown_latest_offset",
            "action.notable.param.extract_artifacts": "extract_artifacts",
            "action.notable.param.investigation_profiles": "investigation_profiles",
            "action.notable.param.next_steps": "next_steps",
            "action.notable.param.recommended_actions": "recommended_actions",
            "action.notable.param.rule_description": "description",
            "action.notable.param.rule_title": "name",
            "action.notable.param.security_domain": "security_domain",
            "action.notable.param.severity": "severity",
            "name": "correlation_search_name",
        }
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def fail_json(self, msg):
"""Replace the AnsibleModule fail_json here
:param msg: The message for the failure
:type msg: str
"""
msg = msg.replace("(basic.py)", self._task.action)
raise AnsibleActionFail(msg)
# need to store 'recommended_actions','extract_artifacts','next_steps' and 'investigation_profiles'
# since merging in the parsed form will eliminate any differences
def save_params(self, want_conf):
param_store = {}
if "recommended_actions" in want_conf:
param_store["recommended_actions"] = want_conf[
"recommended_actions"
]
if "extract_artifacts" in want_conf:
param_store["extract_artifacts"] = want_conf["extract_artifacts"]
if "next_steps" in want_conf:
param_store["next_steps"] = want_conf["next_steps"]
if "investigation_profiles" in want_conf:
param_store["investigation_profiles"] = want_conf[
"investigation_profiles"
]
return param_store
# responsible for correctly setting certain parameters depending on the state being triggered.
# These parameters are responsible for enabling and disabling notable response actions
def create_metadata(self, metadata, mode="add"):
if mode == "add":
if "actions" in metadata:
if metadata["actions"] == "notable":
pass
elif (
len(metadata["actions"].split(",")) > 0
and "notable" not in metadata["actions"]
):
metadata["actions"] = metadata["actions"] + ", notable"
else:
metadata["actions"] = "notable"
metadata["action.notable"] = "1"
elif mode == "delete":
if "actions" in metadata:
if metadata["actions"] == "notable":
metadata["actions"] = ""
elif (
len(metadata["actions"].split(",")) > 0
and "notable" in metadata["actions"]
):
tmp_list = metadata["actions"].split(",")
tmp_list.remove(" notable")
metadata["actions"] = ",".join(tmp_list)
metadata["action.notable"] = "0"
return metadata
    def map_params_to_object(self, config):
        """Convert one Splunk API "entry" into module-argument form.

        :param config: one element of the API response's "entry" list.
        :returns: (parsed notable-event settings, metadata) — metadata keeps
            the raw "search" and "actions" strings for later payloads.
        """
        res = {}
        res["correlation_search_name"] = config["name"]
        res.update(map_params_to_obj(config["content"], self.key_transform))
        if "extract_artifacts" in res:
            # stored as a JSON string on the Splunk side
            res["extract_artifacts"] = json.loads(res["extract_artifacts"])
        if "investigation_profiles" in res:
            if res["investigation_profiles"] == "{}":
                # "{}" means no profiles: omit the key entirely
                res.pop("investigation_profiles")
            else:
                res["investigation_profiles"] = json.loads(
                    res["investigation_profiles"]
                )
                # keys look like "profile://<name>"; expose just the names
                investigation_profiles = []
                for keys in res["investigation_profiles"].keys():
                    investigation_profiles.append(keys.split("profile://")[1])
                res["investigation_profiles"] = investigation_profiles
        if "recommended_actions" in res:
            # comma-separated string -> list
            res["recommended_actions"] = res["recommended_actions"].split(",")
        if "next_steps" in res:
            # JSON like {"version": ..., "data": "[[action|a]][[action|b]]"}
            next_steps = json.loads(res["next_steps"])["data"]
            next_steps = next_steps.split("]][[")
            # trimming trailing characters
            next_steps[0] = next_steps[0].strip("[")
            next_steps[-1] = next_steps[-1].strip("]")
            res["next_steps"] = []
            for element in next_steps:
                # "action|<text>" -> "<text>"
                res["next_steps"].append(element.split("|")[1])
        if "default_status" in res:
            # numeric status code -> human-readable status
            mapping = {
                "0": "unassigned",
                "1": "new",
                "2": "in progress",
                "3": "pending",
                "4": "resolved",
                "5": "closed",
            }
            res["default_status"] = mapping[res["default_status"]]
        # need to store correlation search details for populating future
        # request payloads
        metadata = {}
        metadata["search"] = config["content"]["search"]
        metadata["actions"] = config["content"]["actions"]
        return res, metadata
    def map_objects_to_params(self, metadata, want_conf):
        """Convert module arguments into a Splunk REST payload.

        Inverse of map_params_to_object: re-serialises parsed fields and
        injects the metadata needed to keep the notable action enabled.
        """
        res = {}
        res.update(map_obj_to_params(want_conf, self.key_transform))
        # ensure the notable action stays enabled on the search
        res.update(self.create_metadata(metadata))
        if "action.notable.param.extract_artifacts" in res:
            res["action.notable.param.extract_artifacts"] = json.dumps(
                res["action.notable.param.extract_artifacts"]
            )
        if "action.notable.param.recommended_actions" in res:
            # list -> comma-separated string
            res["action.notable.param.recommended_actions"] = ",".join(
                res["action.notable.param.recommended_actions"]
            )
        if "action.notable.param.investigation_profiles" in res:
            # stored on the Splunk side as {"profile://<name>": {}, ...}
            investigation_profiles = {}
            for element in res["action.notable.param.investigation_profiles"]:
                investigation_profiles["profile://" + element] = {}
            res["action.notable.param.investigation_profiles"] = json.dumps(
                investigation_profiles
            )
        if "action.notable.param.next_steps" in res:
            next_steps = ""
            for next_step in res["action.notable.param.next_steps"]:
                next_steps += "[[action|{0}]]".format(next_step)
            # NOTE: version:1 appears to be hard coded when you create this
            # via the splunk web UI
            next_steps_dict = {"version": 1, "data": next_steps}
            res["action.notable.param.next_steps"] = json.dumps(
                next_steps_dict
            )
        if "action.notable.param.default_status" in res:
            # human-readable status -> numeric status code
            mapping = {
                "unassigned": "0",
                "new": "1",
                "in progress": "2",
                "pending": "3",
                "resolved": "4",
                "closed": "5",
            }
            res["action.notable.param.default_status"] = mapping[
                res["action.notable.param.default_status"]
            ]
        # need to remove 'name', otherwise the API call will try to modify
        # the correlation search
        res.pop("name")
        return res
def search_for_resource_name(self, conn_request, correlation_search_name):
query_dict = conn_request.get_by_path(
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
)
search_result = {}
if query_dict:
search_result, metadata = self.map_params_to_object(
query_dict["entry"][0]
)
else:
raise AnsibleActionFail(
"Correlation Search '{0}' doesn't exist".format(
correlation_search_name
)
)
return search_result, metadata
    # Since there is no delete operation associated with an action,
    # The delete operation will unset the relevant fields
    def delete_module_api_config(self, conn_request, config):
        """Blank out notable-event fields for each configured search.

        :returns: ({"after": ..., "before": ...}, changed)
        """
        before = []
        after = None
        changed = False
        for want_conf in config:
            search_by_name, metadata = self.search_for_resource_name(
                conn_request, want_conf["correlation_search_name"]
            )
            search_by_name = utils.remove_empties(search_by_name)
            # Compare obtained values with a dict representing values in a
            # 'deleted' state
            diff_cmp = {
                "correlation_search_name": want_conf[
                    "correlation_search_name"
                ],
                "drilldown_earliest_offset": "$info_min_time$",
                "drilldown_latest_offset": "$info_max_time$",
            }
            # if the obtained values are different from 'deleted' state values
            if search_by_name and search_by_name != diff_cmp:
                before.append(search_by_name)
                # empty strings unset fields; drilldown offsets revert to
                # their default time tokens
                payload = {
                    "action.notable.param.default_owner": "",
                    "action.notable.param.default_status": "",
                    "action.notable.param.drilldown_name": "",
                    "action.notable.param.drilldown_search": "",
                    "action.notable.param.drilldown_earliest_offset": "$info_min_time$",
                    "action.notable.param.drilldown_latest_offset": "$info_max_time$",
                    "action.notable.param.extract_artifacts": "{}",
                    "action.notable.param.investigation_profiles": "{}",
                    "action.notable.param.next_steps": "",
                    "action.notable.param.recommended_actions": "",
                    "action.notable.param.rule_description": "",
                    "action.notable.param.rule_title": "",
                    "action.notable.param.security_domain": "",
                    "action.notable.param.severity": "",
                }
                # also disable the notable action on the search itself
                payload.update(self.create_metadata(metadata, mode="delete"))
                url = "{0}/{1}".format(
                    self.api_object,
                    quote(want_conf["correlation_search_name"]),
                )
                conn_request.create_update(
                    url,
                    data=payload,
                )
                changed = True
                after = []
        res_config = {}
        res_config["after"] = after
        res_config["before"] = before
        return res_config, changed
    def configure_module_api(self, conn_request, config):
        """Create or update notable-event settings ('merged'/'replaced').

        :param conn_request: SplunkRequest wrapper around the connection.
        :param config: list of desired notable-event configurations.
        :returns: ({"after": ..., "before": ...}, changed)
        """
        before = []
        after = []
        changed = False
        # Add to the THIS list for the value which needs to be excluded
        # from HAVE params when compared to WANT param like 'ID' can be
        # part of HAVE param but may not be part of your WANT param
        defaults = {
            "drilldown_earliest_offset": "$info_min_time$",
            "drilldown_latest_offset": "$info_max_time$",
            "extract_artifacts": {
                "asset": [
                    "src",
                    "dest",
                    "dvc",
                    "orig_host",
                ],
                "identity": [
                    "src_user",
                    "user",
                    "src_user_id",
                    "src_user_role",
                    "user_id",
                    "user_role",
                    "vendor_account",
                ],
            },
            "investigation_profiles": "{}",
        }
        remove_from_diff_compare = []
        for want_conf in config:
            # raises AnsibleActionFail if the correlation search is missing
            have_conf, metadata = self.search_for_resource_name(
                conn_request, want_conf["correlation_search_name"]
            )
            correlation_search_name = want_conf["correlation_search_name"]
            if "notable" in metadata["actions"]:
                # notable action already enabled -> this is an update
                want_conf = set_defaults(want_conf, defaults)
                want_conf = utils.remove_empties(want_conf)
                diff = utils.dict_diff(have_conf, want_conf)
                # Check if have_conf has extra parameters
                if self._task.args["state"] == "replaced":
                    diff2 = utils.dict_diff(want_conf, have_conf)
                    if len(diff) or len(diff2):
                        diff.update(diff2)
                if diff:
                    before.append(have_conf)
                    if self._task.args["state"] == "merged":
                        # need to store 'recommended_actions',
                        # 'extract_artifacts', 'next_steps' and
                        # 'investigation_profiles' since merging in the
                        # parsed form will eliminate any differences
                        param_store = self.save_params(want_conf)
                        want_conf = utils.remove_empties(
                            utils.dict_merge(have_conf, want_conf)
                        )
                        want_conf = remove_get_keys_from_payload_dict(
                            want_conf, remove_from_diff_compare
                        )
                        # restoring parameters
                        want_conf.update(param_store)
                        changed = True
                        payload = self.map_objects_to_params(
                            metadata, want_conf
                        )
                        url = "{0}/{1}".format(
                            self.api_object,
                            quote(correlation_search_name),
                        )
                        api_response = conn_request.create_update(
                            url,
                            data=payload,
                        )
                        response_json, metadata = self.map_params_to_object(
                            api_response["entry"][0]
                        )
                        after.append(response_json)
                    elif self._task.args["state"] == "replaced":
                        # reset notable settings first, then apply want as-is
                        self.delete_module_api_config(
                            conn_request=conn_request, config=[want_conf]
                        )
                        changed = True
                        payload = self.map_objects_to_params(
                            metadata, want_conf
                        )
                        url = "{0}/{1}".format(
                            self.api_object,
                            quote(correlation_search_name),
                        )
                        api_response = conn_request.create_update(
                            url,
                            data=payload,
                        )
                        response_json, metadata = self.map_params_to_object(
                            api_response["entry"][0]
                        )
                        after.append(response_json)
                else:
                    # no difference: report current state unchanged
                    before.append(have_conf)
                    after.append(have_conf)
            else:
                # notable action not yet enabled on the search -> create it
                changed = True
                want_conf = utils.remove_empties(want_conf)
                payload = self.map_objects_to_params(metadata, want_conf)
                url = "{0}/{1}".format(
                    self.api_object,
                    quote(correlation_search_name),
                )
                api_response = conn_request.create_update(
                    url,
                    data=payload,
                )
                response_json, metadata = self.map_params_to_object(
                    api_response["entry"][0]
                )
                after.extend(before)
                after.append(response_json)
        if not changed:
            after = None
        res_config = {}
        res_config["after"] = after
        res_config["before"] = before
        return res_config, changed
    def run(self, tmp=None, task_vars=None):
        """Entry point: validate the argspec, then dispatch on 'state'."""
        self._supports_check_mode = True
        self._result = super(ActionModule, self).run(tmp, task_vars)
        self._check_argspec()
        if self._result.get("failed"):
            return self._result
        self._result[self.module_name] = {}
        # config is retrieved as a string; need to deserialise
        config = self._task.args.get("config")
        conn = Connection(self._connection.socket_path)
        conn_request = SplunkRequest(
            action_module=self,
            connection=conn,
            not_rest_data_keys=["state"],
        )
        if self._task.args["state"] == "gathered":
            if config:
                self._result["changed"] = False
                self._result[self.module_name]["gathered"] = []
                for item in config:
                    # [0] keeps the parsed settings, dropping the metadata
                    # half of the (settings, metadata) tuple
                    self._result[self.module_name]["gathered"].append(
                        self.search_for_resource_name(
                            conn_request, item["correlation_search_name"]
                        )[0]
                    )
        elif (
            self._task.args["state"] == "merged"
            or self._task.args["state"] == "replaced"
        ):
            (
                self._result[self.module_name],
                self._result["changed"],
            ) = self.configure_module_api(conn_request, config)
            # "after" is None when nothing changed; omit it from the result
            if self._result[self.module_name]["after"] is None:
                self._result[self.module_name].pop("after")
        elif self._task.args["state"] == "deleted":
            (
                self._result[self.module_name],
                self._result["changed"],
            ) = self.delete_module_api_config(conn_request, config)
            if self._result[self.module_name]["after"] is None:
                self._result[self.module_name].pop("after")
        return self._result

View File

@@ -0,0 +1,435 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for splunk_correlation_searches
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import json
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
set_defaults,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
    """Action module backing splunk.es splunk_correlation_searches."""

    def __init__(self, *args, **kwargs):
        """Set up the REST endpoint, module name and key mappings."""
        super(ActionModule, self).__init__(*args, **kwargs)
        # Populated by run(); holds the result dict returned to Ansible.
        self._result = None
        # Correlation searches are saved searches in the
        # SplunkEnterpriseSecuritySuite app namespace.
        self.api_object = (
            "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
        )
        self.module_name = "correlation_searches"
        # Splunk REST field name -> module argument name.
        self.key_transform = {
            "disabled": "disabled",
            "name": "name",
            "description": "description",
            "search": "search",
            "action.correlationsearch.annotations": "annotations",
            "request.ui_dispatch_app": "ui_dispatch_context",
            "dispatch.earliest_time": "time_earliest",
            "dispatch.latest_time": "time_latest",
            "cron_schedule": "cron_schedule",
            "realtime_schedule": "scheduling",
            "schedule_window": "schedule_window",
            "schedule_priority": "schedule_priority",
            "alert.digest_mode": "trigger_alert",
            "alert_type": "trigger_alert_when",
            "alert_comparator": "trigger_alert_when_condition",
            "alert_threshold": "trigger_alert_when_value",
            "alert.suppress": "suppress_alerts",
            "alert.suppress.period": "throttle_window_duration",
            "alert.suppress.fields": "throttle_fields_to_group_by",
        }
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def fail_json(self, msg):
"""Replace the AnsibleModule fail_json here
:param msg: The message for the failure
:type msg: str
"""
msg = msg.replace("(basic.py)", self._task.action)
raise AnsibleActionFail(msg)
# need to store 'annotations' and 'throttle_fields_to_group_by'
# since merging in the parsed form will eliminate any differences
# This is because these fields are getting converted from strings
# to lists/dictionaries, and so these fields need to be compared
# as such
def save_params(self, want_conf):
param_store = {}
if "annotations" in want_conf:
param_store["annotations"] = want_conf["annotations"]
if "throttle_fields_to_group_by" in want_conf:
param_store["throttle_fields_to_group_by"] = want_conf[
"throttle_fields_to_group_by"
]
return param_store
    def map_params_to_object(self, config):
        """Convert one Splunk API "entry" into module-argument form.

        :param config: one element of the API response's "entry" list.
        :returns: dict of parsed correlation-search settings.
        """
        res = {}
        res["app"] = config["acl"]["app"]
        res.update(map_params_to_obj(config["content"], self.key_transform))
        res.update(map_params_to_obj(config, self.key_transform))
        if "scheduling" in res:
            # boolean realtime_schedule on the Splunk side
            if res["scheduling"]:
                res["scheduling"] = "realtime"
            else:
                res["scheduling"] = "continuous"
        if "trigger_alert" in res:
            # boolean alert.digest_mode on the Splunk side
            if res["trigger_alert"]:
                res["trigger_alert"] = "once"
            else:
                res["trigger_alert"] = "for each result"
        if "throttle_fields_to_group_by" in res:
            # comma-separated string -> list
            res["throttle_fields_to_group_by"] = res[
                "throttle_fields_to_group_by"
            ].split(",")
        if "annotations" in res:
            res["annotations"] = json.loads(res["annotations"])
            custom = []
            # need to check for custom annotation frameworks: anything not
            # in the known set is reported under a "custom" list
            for k, v in res["annotations"].items():
                if k in {"cis20", "nist", "mitre_attack", "kill_chain_phases"}:
                    continue
                entry = {}
                entry["framework"] = k
                entry["custom_annotations"] = v
                custom.append(entry)
            if custom:
                for entry in custom:
                    res["annotations"].pop(entry["framework"])
                res["annotations"]["custom"] = custom
        return res
    def map_objects_to_params(self, want_conf):
        """Convert module arguments into a Splunk REST payload.

        Inverse of map_params_to_object; also sets the fields that mark the
        saved search as an enabled, scheduled correlation search.
        """
        res = {}
        # setting parameters that enable correlation search
        res["action.correlationsearch.enabled"] = "1"
        res["is_scheduled"] = True
        res["dispatch.rt_backfill"] = True
        res["action.correlationsearch.label"] = want_conf["name"]
        res.update(map_obj_to_params(want_conf, self.key_transform))
        if "realtime_schedule" in res:
            # "realtime" -> True, anything else -> False
            if res["realtime_schedule"] == "realtime":
                res["realtime_schedule"] = True
            else:
                res["realtime_schedule"] = False
        if "alert.digest_mode" in res:
            # "once" -> True, "for each result" -> False
            if res["alert.digest_mode"] == "once":
                res["alert.digest_mode"] = True
            else:
                res["alert.digest_mode"] = False
        if "alert.suppress.fields" in res:
            # list -> comma-separated string
            res["alert.suppress.fields"] = ",".join(
                res["alert.suppress.fields"]
            )
        if (
            "action.correlationsearch.annotations" in res
            and "custom" in res["action.correlationsearch.annotations"]
        ):
            # fold the "custom" list back into top-level framework keys
            for ele in res["action.correlationsearch.annotations"]["custom"]:
                res["action.correlationsearch.annotations"][
                    ele["framework"]
                ] = ele["custom_annotations"]
            res["action.correlationsearch.annotations"].pop("custom")
            # NOTE(review): serialisation happens only on this branch, so
            # annotations without custom frameworks appear to stay a dict in
            # the payload — TODO confirm intended behaviour upstream
            res["action.correlationsearch.annotations"] = json.dumps(
                res["action.correlationsearch.annotations"]
            )
        return res
def search_for_resource_name(self, conn_request, correlation_search_name):
query_dict = conn_request.get_by_path(
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
)
)
search_result = {}
if query_dict:
search_result = self.map_params_to_object(query_dict["entry"][0])
return search_result
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
conn_request, want_conf["name"]
)
if search_by_name:
before.append(search_by_name)
url = "{0}/{1}".format(
self.api_object,
quote(want_conf["name"]),
)
conn_request.delete_by_path(
url,
)
changed = True
after = []
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
    def configure_module_api(self, conn_request, config):
        """Create or update correlation searches ('merged'/'replaced').

        :returns: ({"after": ..., "before": ...}, changed)
        """
        before = []
        after = []
        changed = False
        # Add to the THIS list for the value which needs to be excluded
        # from HAVE params when compared to WANT param like 'ID' can be
        # part of HAVE param but may not be part of your WANT param
        defaults = {}
        remove_from_diff_compare = []
        for want_conf in config:
            have_conf = self.search_for_resource_name(
                conn_request, want_conf["name"]
            )
            if have_conf:
                want_conf = set_defaults(want_conf, defaults)
                want_conf = utils.remove_empties(want_conf)
                diff = utils.dict_diff(have_conf, want_conf)
                # Check if have_conf has extra parameters
                if self._task.args["state"] == "replaced":
                    diff2 = utils.dict_diff(want_conf, have_conf)
                    if len(diff) or len(diff2):
                        diff.update(diff2)
                if diff:
                    name = want_conf["name"]
                    before.append(have_conf)
                    if self._task.args["state"] == "merged":
                        # need to store 'annotations' and
                        # 'throttle_group_by_field' since merging in the
                        # parsed form will eliminate any differences
                        param_store = self.save_params(want_conf)
                        want_conf = utils.remove_empties(
                            utils.dict_merge(have_conf, want_conf)
                        )
                        want_conf = remove_get_keys_from_payload_dict(
                            want_conf, remove_from_diff_compare
                        )
                        # restoring parameters
                        want_conf.update(param_store)
                        changed = True
                        payload = self.map_objects_to_params(want_conf)
                        url = "{0}/{1}".format(
                            self.api_object,
                            quote(name),
                        )
                        api_response = conn_request.create_update(
                            url,
                            data=payload,
                        )
                        response_json = self.map_params_to_object(
                            api_response["entry"][0]
                        )
                        after.append(response_json)
                    elif self._task.args["state"] == "replaced":
                        # replace = delete, then re-create from want alone
                        self.delete_module_api_config(
                            conn_request=conn_request, config=[want_conf]
                        )
                        changed = True
                        payload = self.map_objects_to_params(want_conf)
                        url = "{0}/{1}".format(
                            self.api_object,
                            quote(name),
                        )
                        # while creating new correlation search, this is
                        # how to set the 'app' field
                        if "app" in want_conf:
                            url = url.replace(
                                "SplunkEnterpriseSecuritySuite",
                                want_conf["app"],
                            )
                        api_response = conn_request.create_update(
                            url,
                            data=payload,
                        )
                        response_json = self.map_params_to_object(
                            api_response["entry"][0]
                        )
                        after.append(response_json)
                else:
                    # no difference: report current state unchanged
                    before.append(have_conf)
                    after.append(have_conf)
            else:
                # search doesn't exist yet: create it
                changed = True
                want_conf = utils.remove_empties(want_conf)
                name = want_conf["name"]
                payload = self.map_objects_to_params(want_conf)
                url = "{0}/{1}".format(
                    self.api_object,
                    quote(name),
                )
                # while creating new correlation search, this is how to set
                # the 'app' field
                if "app" in want_conf:
                    url = url.replace(
                        "SplunkEnterpriseSecuritySuite", want_conf["app"]
                    )
                api_response = conn_request.create_update(
                    url,
                    data=payload,
                )
                response_json = self.map_params_to_object(
                    api_response["entry"][0]
                )
                after.extend(before)
                after.append(response_json)
        if not changed:
            after = None
        res_config = {}
        res_config["after"] = after
        res_config["before"] = before
        return res_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
self._result[self.module_name] = {}
# config is retrieved as a string; need to deserialise
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
action_module=self,
connection=conn,
not_rest_data_keys=["state"],
)
if self._task.args["state"] == "gathered":
if config:
self._result["changed"] = False
self._result["gathered"] = []
for item in config:
result = self.search_for_resource_name(
conn_request, item["name"]
)
if result:
self._result["gathered"].append(result)
for item in config:
self._result["gathered"].append(
self.search_for_resource_name(
conn_request, item["name"]
)
)
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
(
self._result[self.module_name],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
elif self._task.args["state"] == "deleted":
(
self._result[self.module_name],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
return self._result

View File

@@ -0,0 +1,313 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for data_inputs_monitor
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
set_defaults,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
    """Action module backing splunk.es splunk_data_inputs_monitor."""

    def __init__(self, *args, **kwargs):
        """Set up the REST endpoint, module name and key mappings."""
        super(ActionModule, self).__init__(*args, **kwargs)
        # Populated by run(); holds the result dict returned to Ansible.
        self._result = None
        self.api_object = "servicesNS/nobody/search/data/inputs/monitor"
        self.module_name = "data_inputs_monitor"
        # Splunk REST field name -> module argument name. Fields marked
        # "not returned" are write-only: Splunk accepts them but never
        # echoes them back in GET responses.
        self.key_transform = {
            "blacklist": "blacklist",
            "check-index": "check_index",  # not returned
            "check-path": "check_path",  # not returned
            "crc-salt": "crc_salt",
            "disabled": "disabled",
            "followTail": "follow_tail",
            "host": "host",
            "host_regex": "host_regex",
            "host_segment": "host_segment",
            "ignore-older-than": "ignore_older_than",  # not returned
            "index": "index",
            "name": "name",
            "recursive": "recursive",
            "rename-source": "rename_source",  # not returned
            "sourcetype": "sourcetype",
            "time-before-close": "time_before_close",  # not returned
            "whitelist": "whitelist",
        }
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def map_params_to_object(self, config):
res = {}
res["name"] = config["name"]
# splunk takes "crc-salt" as input parameter, and returns "crcSalt" in output
# therefore we can't directly use mapping
if config["content"].get("crcSalt"):
config["content"]["crc-salt"] = config["content"]["crcSalt"]
res.update(map_params_to_obj(config["content"], self.key_transform))
return res
def search_for_resource_name(self, conn_request, directory_name):
query_dict = conn_request.get_by_path(
"{0}/{1}".format(self.api_object, quote_plus(directory_name))
)
search_result = {}
if query_dict:
search_result = self.map_params_to_object(query_dict["entry"][0])
return search_result
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
conn_request, want_conf["name"]
)
if search_by_name:
before.append(search_by_name)
conn_request.delete_by_path(
"{0}/{1}".format(
self.api_object, quote_plus(want_conf["name"])
)
)
changed = True
after = []
res_config = {}
res_config["after"] = after
res_config["before"] = before
return res_config, changed
    def configure_module_api(self, conn_request, config):
        """Create or update monitor data inputs ('merged'/'replaced').

        :returns: ({"after": ..., "before": ...}, changed)
        """
        before = []
        after = []
        changed = False
        # Add to the THIS list for the value which needs to be excluded
        # from HAVE params when compared to WANT param like 'ID' can be
        # part of HAVE param but may not be part of your WANT param
        defaults = {
            "disabled": False,
            "host": "$decideOnStartup",
            "index": "default",
        }
        # write-only fields: never returned by GET, so they must not take
        # part in the have/want comparison
        remove_from_diff_compare = [
            "check_path",
            "check_index",
            "ignore_older_than",
            "time_before_close",
            "rename_source",
        ]
        for want_conf in config:
            have_conf = self.search_for_resource_name(
                conn_request, want_conf["name"]
            )
            if have_conf:
                want_conf = set_defaults(want_conf, defaults)
                want_conf = utils.remove_empties(want_conf)
                diff = utils.dict_diff(have_conf, want_conf)
                # Check if have_conf has extra parameters
                if self._task.args["state"] == "replaced":
                    diff2 = utils.dict_diff(want_conf, have_conf)
                    if len(diff) or len(diff2):
                        diff.update(diff2)
                if diff:
                    # drop write-only params before deciding whether
                    # anything really changed
                    diff = remove_get_keys_from_payload_dict(
                        diff, remove_from_diff_compare
                    )
                    if diff:
                        before.append(have_conf)
                        if self._task.args["state"] == "merged":
                            want_conf = utils.remove_empties(
                                utils.dict_merge(have_conf, want_conf)
                            )
                            want_conf = remove_get_keys_from_payload_dict(
                                want_conf, remove_from_diff_compare
                            )
                            changed = True
                            payload = map_obj_to_params(
                                want_conf, self.key_transform
                            )
                            # updates go to the item URL; "name" must not
                            # remain in the payload
                            url = "{0}/{1}".format(
                                self.api_object,
                                quote_plus(payload.pop("name")),
                            )
                            api_response = conn_request.create_update(
                                url,
                                data=payload,
                            )
                            response_json = self.map_params_to_object(
                                api_response["entry"][0]
                            )
                            after.append(response_json)
                        elif self._task.args["state"] == "replaced":
                            # replace = delete, then re-create at the
                            # collection URL
                            conn_request.delete_by_path(
                                "{0}/{1}".format(
                                    self.api_object,
                                    quote_plus(want_conf["name"]),
                                )
                            )
                            changed = True
                            payload = map_obj_to_params(
                                want_conf, self.key_transform
                            )
                            url = "{0}".format(self.api_object)
                            api_response = conn_request.create_update(
                                url,
                                data=payload,
                            )
                            response_json = self.map_params_to_object(
                                api_response["entry"][0]
                            )
                            after.append(response_json)
                    else:
                        # only write-only fields differed: no real change
                        before.append(have_conf)
                        after.append(have_conf)
                else:
                    # no difference: report current state unchanged
                    before.append(have_conf)
                    after.append(have_conf)
            else:
                # input doesn't exist yet: create it at the collection URL
                changed = True
                want_conf = utils.remove_empties(want_conf)
                payload = map_obj_to_params(want_conf, self.key_transform)
                url = "{0}".format(self.api_object)
                api_response = conn_request.create_update(
                    url,
                    data=payload,
                )
                response_json = self.map_params_to_object(
                    api_response["entry"][0]
                )
                after.extend(before)
                after.append(response_json)
        if not changed:
            after = None
        res_config = {}
        res_config["after"] = after
        res_config["before"] = before
        return res_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
# self._result[self.module_name] = {}
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
action_module=self,
connection=conn,
not_rest_data_keys=["state"],
)
if self._task.args["state"] == "gathered":
if config:
self._result["gathered"] = []
self._result["changed"] = False
for item in config:
result = self.search_for_resource_name(
conn_request, item["name"]
)
if result:
self._result["gathered"].append(result)
else:
self._result["gathered"] = conn_request.get_by_path(
self.api_object
)["entry"]
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
(
self._result[self.module_name],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
elif self._task.args["state"] == "deleted":
(
self._result[self.module_name],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_name]["after"] is None:
self._result[self.module_name].pop("after")
return self._result

View File

@@ -0,0 +1,538 @@
#
# Copyright 2022 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
The module file for data_inputs_network
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import (
DOCUMENTATION,
)
class ActionModule(ActionBase):
"""action module"""
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
self.api_object = "servicesNS/nobody/search/data/inputs"
self.module_return = "data_inputs_network"
self.key_transform = {
"name": "name",
"connection_host": "connection_host",
"disabled": "disabled",
"index": "index",
"host": "host",
"no_appending_timestamp": "no_appending_timestamp",
"no_priority_stripping": "no_priority_stripping",
"rawTcpDoneTimeout": "raw_tcp_done_timeout",
"restrictToHost": "restrict_to_host",
"queue": "queue",
"SSL": "ssl",
"source": "source",
"sourcetype": "sourcetype",
"token": "token",
"password": "password",
"requireClientCert": "require_client_cert",
"rootCA": "root_ca",
"serverCert": "server_cert",
"cipherSuite": "cipher_suite",
}
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
self._result["failed"] = True
self._result["msg"] = errors
def fail_json(self, msg):
"""Replace the AnsibleModule fail_json here
:param msg: The message for the failure
:type msg: str
"""
msg = msg.replace("(basic.py)", self._task.action)
raise AnsibleActionFail(msg)
def map_params_to_object(self, config, datatype=None):
res = {}
res["name"] = config["name"]
res.update(map_params_to_obj(config["content"], self.key_transform))
# API returns back "index", even though it can't be set within /tcp/cooked
if datatype:
if datatype == "cooked" and "index" in res:
res.pop("index")
elif datatype == "splunktcptoken":
if "index" in res:
res.pop("index")
if "host" in res:
res.pop("host")
if "disabled" in res:
res.pop("disabled")
return res
# This function is meant to construct the URL and handle GET, POST and DELETE calls
    # depending on the context. The URLs constructed and handled are:
# /tcp/raw[/{name}]
# /tcp/cooked[/{name}]
# /tcp/splunktcptoken[/{name}]
# /tcp/ssl[/{name}]
# /udp[/{name}]
def request_by_path(
self,
conn_request,
protocol,
datatype=None,
name=None,
req_type="get",
payload=None,
):
query_dict = None
url = ""
if protocol == "tcp":
if not datatype:
raise AnsibleActionFail("No datatype specified for TCP input")
# In all cases except "ssl" datatype, creation of objects is handled
# by a POST request to the parent directory. Therefore name shouldn't
# be included in the URL.
if not name or (req_type == "post_create" and datatype != "ssl"):
name = ""
url = "{0}/{1}/{2}/{3}".format(
self.api_object,
protocol,
datatype,
quote_plus(str(name)),
)
# if no "name" was provided
if url[-1] == "/":
url = url[:-1]
elif protocol == "udp":
if datatype:
raise AnsibleActionFail("Datatype specified for UDP input")
if not name or req_type == "post_create":
name = ""
url = "{0}/{1}/{2}".format(
self.api_object,
protocol,
quote_plus(str(name)),
)
# if no "name" was provided
if url[-1] == "/":
url = url[:-1]
else:
raise AnsibleActionFail(
"Incompatible protocol specified. Please specify 'tcp' or 'udp'"
)
if req_type == "get":
query_dict = conn_request.get_by_path(url)
elif req_type == "post_create":
query_dict = conn_request.create_update(url, data=payload)
elif req_type == "post_update":
payload.pop("name")
query_dict = conn_request.create_update(url, data=payload)
elif req_type == "delete":
query_dict = conn_request.delete_by_path(url)
return query_dict
def search_for_resource_name(self, conn_request, protocol, datatype, name):
query_dict = self.request_by_path(
conn_request,
protocol,
datatype,
name,
)
search_result = {}
if query_dict:
search_result = self.map_params_to_object(
query_dict["entry"][0], datatype
)
# Adding back protocol and datatype fields for better clarity
search_result["protocol"] = protocol
if datatype:
search_result["datatype"] = datatype
if datatype == "ssl":
search_result["name"] = name
return search_result
# If certain parameters are present, Splunk appends the value of those parameters
# to the name. Therefore this causes idempotency to fail. This function looks for
# said parameters and conducts checks to see if the configuration already exists.
    def parse_config(self, conn_request, want_conf):
        """Normalize the wanted name and look up any existing configuration.

        Splunk prepends "restrict_to_host:" (for tcp/udp port inputs) or
        "splunktcptoken://" to object names server-side, which breaks
        idempotency if the wanted name is compared verbatim. This rewrites
        ``want_conf["name"]`` to the server-side form before searching, and
        falls back to the bare ("old") name when the object does not exist.

        NOTE: mutates ``want_conf`` in place.

        :param conn_request: SplunkRequest wrapper used for the REST calls
        :param want_conf: one entry of the task's ``config`` list
        :returns: tuple (have_conf, protocol, datatype, name, old_name) where
            ``name`` is the server-side (prefixed) form and ``old_name`` the
            bare form
        :raises AnsibleActionFail: when no name is supplied
        """
        old_name = None
        protocol = want_conf["protocol"]
        datatype = want_conf.get("datatype")
        if not want_conf.get("name"):
            raise AnsibleActionFail("No name specified for merge action")
        else:
            # Int values confuse diff
            want_conf["name"] = str(want_conf["name"])
            old_name = want_conf["name"]
            # If the name already carries the "host:port" prefix, strip it so
            # old_name holds the bare port-number form.
            if (
                want_conf.get("restrict_to_host")
                and old_name.split(":")[0] == want_conf["restrict_to_host"]
            ):
                old_name = old_name.split(":")[1]
            # If "restrictToHost" parameter is set, the value of this parameter
            # is prepended to the numerical name meant to represent port number
            if (
                want_conf.get("restrict_to_host")
                and want_conf["restrict_to_host"] not in want_conf["name"]
            ):
                want_conf["name"] = "{0}:{1}".format(
                    want_conf["restrict_to_host"], want_conf["name"]
                )
            # If datatype is "splunktcptoken", the value "splunktcptoken://"
            # is prepended to the name
            elif (
                datatype
                and datatype == "splunktcptoken"
                and "splunktcptoken://" not in want_conf["name"]
            ):
                want_conf["name"] = "{0}{1}".format(
                    "splunktcptoken://", want_conf["name"]
                )
            name = want_conf["name"]
        # If the above parameters are present, but the object doesn't exist
        # the value of the parameters shouldn't be prepended to the name.
        # Otherwise Splunk returns 400. This check takes advantage of this
        # and sets the correct name.
        have_conf = None
        try:
            have_conf = self.search_for_resource_name(
                conn_request,
                protocol,
                datatype,
                name,
            )
            # while creating new conf, we need to only use numerical values
            # splunk will later append param value to it.
            if not have_conf:
                want_conf["name"] = old_name
        except AnsibleActionFail:
            # Lookup under the prefixed name failed; retry with the bare name.
            want_conf["name"] = old_name
            have_conf = self.search_for_resource_name(
                conn_request,
                protocol,
                datatype,
                old_name,
            )
        # SSL response returns a blank "name" parameter, which causes problems
        # NOTE(review): if the ssl object were ever absent, have_conf would be
        # an empty dict and this write would make it truthy — presumably ssl
        # always exists server-side; verify against the Splunk API.
        if datatype == "ssl":
            have_conf["name"] = want_conf["name"]
        return have_conf, protocol, datatype, name, old_name
def delete_module_api_config(self, conn_request, config):
before = []
after = None
changed = False
for want_conf in config:
if not want_conf.get("name"):
raise AnsibleActionFail("No name specified")
have_conf, protocol, datatype, name, _old_name = self.parse_config(
conn_request, want_conf
)
if protocol == "tcp" and datatype == "ssl":
raise AnsibleActionFail("Deleted state not supported for SSL")
if have_conf:
before.append(have_conf)
self.request_by_path(
conn_request,
protocol,
datatype,
name,
req_type="delete",
)
changed = True
after = []
ret_config = {}
ret_config["before"] = before
ret_config["after"] = after
return ret_config, changed
    def configure_module_api(self, conn_request, config):
        """Apply "merged" or "replaced" state to each configured input.

        For each wanted config: diff against the existing object (if any),
        then either PATCH-style update it (merged), delete-and-recreate it
        (replaced), or create it when absent. Objects with no effective diff
        are reported unchanged.

        :param conn_request: SplunkRequest wrapper used for the REST calls
        :param config: list of wanted-config dicts from the task args
        :returns: tuple of ({"before": ..., "after": ...}, changed); "after"
            is None when nothing changed
        :raises AnsibleActionFail: for replaced state on tcp/ssl, which the
            API cannot recreate
        """
        before = []
        after = []
        changed = False
        for want_conf in config:
            # Keys listed here exist in HAVE (returned by the API) but are
            # not part of WANT, so they are stripped before diff comparison.
            remove_from_diff_compare = [
                "datatype",
                "protocol",
                "cipher_suite",
            ]
            have_conf, protocol, datatype, name, old_name = self.parse_config(
                conn_request, want_conf
            )
            if (
                protocol == "tcp"
                and datatype == "ssl"
                and self._task.args["state"] == "replaced"
            ):
                raise AnsibleActionFail("Replaced state not supported for SSL")
            if have_conf:
                want_conf = utils.remove_empties(want_conf)
                diff = utils.dict_diff(have_conf, want_conf)
                # For "replaced", also diff the other direction so parameters
                # present only in have_conf count as differences.
                if self._task.args["state"] == "replaced":
                    diff2 = utils.dict_diff(want_conf, have_conf)
                    if len(diff) or len(diff2):
                        diff.update(diff2)
                if diff:
                    # Drop read-only/bookkeeping keys; the diff may vanish.
                    diff = remove_get_keys_from_payload_dict(
                        diff, remove_from_diff_compare
                    )
                    if diff:
                        before.append(have_conf)
                        if self._task.args["state"] == "merged":
                            # Merge wanted values over the existing object.
                            want_conf = utils.remove_empties(
                                utils.dict_merge(have_conf, want_conf)
                            )
                            want_conf = remove_get_keys_from_payload_dict(
                                want_conf, remove_from_diff_compare
                            )
                            changed = True
                            payload = map_obj_to_params(
                                want_conf, self.key_transform
                            )
                            api_response = self.request_by_path(
                                conn_request,
                                protocol,
                                datatype,
                                name,
                                req_type="post_update",
                                payload=payload,
                            )
                            response_json = self.map_params_to_object(
                                api_response["entry"][0], datatype
                            )
                            # Adding back protocol and datatype fields for better clarity
                            response_json["protocol"] = protocol
                            if datatype:
                                response_json["datatype"] = datatype
                            after.append(response_json)
                        elif self._task.args["state"] == "replaced":
                            # Replace = delete the object, then recreate it.
                            api_response = self.request_by_path(
                                conn_request,
                                protocol,
                                datatype,
                                name,
                                req_type="delete",
                            )
                            changed = True
                            payload = map_obj_to_params(
                                want_conf, self.key_transform
                            )
                            # while creating new conf, we need to only use numerical values
                            # splunk will later append param value to it.
                            payload["name"] = old_name
                            api_response = self.request_by_path(
                                conn_request,
                                protocol,
                                datatype,
                                name,
                                req_type="post_create",
                                payload=payload,
                            )
                            response_json = self.map_params_to_object(
                                api_response["entry"][0], datatype
                            )
                            # Adding back protocol and datatype fields for better clarity
                            response_json["protocol"] = protocol
                            if datatype:
                                response_json["datatype"] = datatype
                            after.append(response_json)
                    else:
                        # Diff emptied after key stripping: nothing to change.
                        before.append(have_conf)
                        after.append(have_conf)
                else:
                    # No differences at all: report the object unchanged.
                    before.append(have_conf)
                    after.append(have_conf)
            else:
                # Object absent: create it from the wanted config.
                changed = True
                want_conf = utils.remove_empties(want_conf)
                payload = map_obj_to_params(want_conf, self.key_transform)
                api_response = self.request_by_path(
                    conn_request,
                    protocol,
                    datatype,
                    name,
                    req_type="post_create",
                    payload=payload,
                )
                response_json = self.map_params_to_object(
                    api_response["entry"][0], datatype
                )
                # Adding back protocol and datatype fields for better clarity
                response_json["protocol"] = protocol
                if datatype:
                    response_json["datatype"] = datatype
                after.extend(before)
                after.append(response_json)
        if not changed:
            after = None
        ret_config = {}
        ret_config["after"] = after
        ret_config["before"] = before
        return ret_config, changed
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
self._check_argspec()
if self._result.get("failed"):
return self._result
config = self._task.args.get("config")
conn = Connection(self._connection.socket_path)
conn_request = SplunkRequest(
connection=conn,
action_module=self,
)
if self._task.args["state"] == "gathered":
if config:
self._result["gathered"] = []
self._result["changed"] = False
for item in config:
if item.get("name"):
result = self.search_for_resource_name(
conn_request,
item["protocol"],
item.get("datatype"),
item.get("name"),
)
if result:
self._result["gathered"].append(result)
else:
response_list = self.request_by_path(
conn_request,
item["protocol"],
item.get("datatype"),
None,
)["entry"]
self._result["gathered"] = []
for response_dict in response_list:
self._result["gathered"].append(
self.map_params_to_object(response_dict),
)
else:
raise AnsibleActionFail("No protocol specified")
elif (
self._task.args["state"] == "merged"
or self._task.args["state"] == "replaced"
):
if config:
(
self._result[self.module_return],
self._result["changed"],
) = self.configure_module_api(conn_request, config)
if not self._result[self.module_return]["after"]:
self._result[self.module_return].pop("after")
elif self._task.args["state"] == "deleted":
if config:
(
self._result[self.module_return],
self._result["changed"],
) = self.delete_module_api_config(conn_request, config)
if self._result[self.module_return]["after"] is None:
self._result[self.module_return].pop("after")
return self._result