Organization and new functions for pandoraPluginTools
Parent: b526b10200 · Commit: 2ca114f844
|
@ -1,52 +1,31 @@
|
|||
# Python: module plugintools for PandoraFMS Developers
|
||||
|
||||
pandoraPluginTools is a library that aims to help the creation of scripts and their integration in PandoraFMS.
|
||||
pandoraPluginTools is a library that aims to help the creation of scripts and their integration in Pandora FMS.
|
||||
|
||||
[PluginTools Reference Documentation](https://pandorafms.com/guides/public/books/plugintools)
|
||||
|
||||
The package includes the following modules: agents, modules, transfer, general, discovery and http. Each one has different requirements and functions that facilitate and automate the data integration in PandoraFMS. They have the following dependencies :
|
||||
|
||||
**agents**
|
||||
Module that contains functions oriented to the creation of agents.
|
||||
- datetime.datetime
|
||||
- subprocess.Popen
|
||||
- Hashlib
|
||||
- sys
|
||||
- os
|
||||
- print_module
|
||||
- print_log_module
|
||||
|
||||
**modules**
|
||||
Module that contains functions oriented to the creation of modules.
|
||||
|
||||
**transfer**
|
||||
Module containing functions oriented to file transfer and data sending.
|
||||
- datetime.datetime
|
||||
- subprocess.Popen
|
||||
- shutil
|
||||
- sys
|
||||
- os
|
||||
- print_agent
|
||||
The package includes the following modules. Each one has different functions that facilitate and automate the data integration in Pandora FMS:
|
||||
|
||||
**general**
|
||||
Module containing general purpose functions, useful in the creation of plugins for PandoraFMS.
|
||||
- datetime.datetime
|
||||
- hashlib
|
||||
- json
|
||||
- sys
|
||||
Module containing general purpose functions, useful in the creation of plugins for Pandora FMS.
|
||||
|
||||
**threads**
|
||||
Module containing threading purpose functions, useful to run parallel functions.
|
||||
|
||||
**agents**
|
||||
Module that contains functions oriented to the creation of Pandora FMS agents.
|
||||
|
||||
**modules**
|
||||
Module that contains functions oriented to the creation of Pandora FMS modules.
|
||||
|
||||
**transfer**
|
||||
Module containing functions oriented to file transfer and data sending to the Pandora FMS server.
|
||||
|
||||
**discovery**
|
||||
Module that contains general purpose functions, useful in the creation of plugins for PandoraFMS discovery.
|
||||
- json
|
||||
- sys
|
||||
Module containing functions oriented to the creation of Pandora FMS discovery plugins.
|
||||
|
||||
**http**
|
||||
Module that contains general purpose functions, useful in the creation of plugins for PandoraFMS discovery.
|
||||
- requests_ntlm.HttpNtlmAuth
|
||||
- requests.auth.HTTPBasicAuth
|
||||
- requests.auth.HTTPDigestAuth
|
||||
- requests.sessions.Session
|
||||
|
||||
Module containing functions oriented to HTTP API calls.
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -56,7 +35,7 @@ import pandoraPluginTools as ppt
|
|||
## Define agent
|
||||
server_name = "WIN-SERV"
|
||||
|
||||
agent=ppt.agents.init_agent()
|
||||
agent=ppt.init_agent()
|
||||
agent.update(
|
||||
agent_name = ppt.generate_md5(server_name),
|
||||
agent_alias = server_name,
|
||||
|
@ -86,3 +65,21 @@ ppt.transfer_xml(
|
|||
)
|
||||
```
|
||||
|
||||
The package has the following dependencies:
|
||||
|
||||
- datetime.datetime
|
||||
- hashlib
|
||||
- json
|
||||
- os
|
||||
- print_agent
|
||||
- print_log_module
|
||||
- print_module
|
||||
- queue.Queue
|
||||
- requests.auth.HTTPBasicAuth
|
||||
- requests.auth.HTTPDigestAuth
|
||||
- requests.sessions.Session
|
||||
- requests_ntlm.HttpNtlmAuth
|
||||
- shutil
|
||||
- subprocess.Popen
|
||||
- sys
|
||||
- threading.Thread
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from .general import *
|
||||
from .threads import *
|
||||
from .agents import *
|
||||
from .modules import *
|
||||
from .transfer import *
|
||||
from .discovery import *
|
||||
from .http import *
|
||||
from .general import *
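Because the package `__init__` re-exports every submodule with wildcard imports, the helpers can be reached either through their submodule or directly from the package namespace, which is why the example above switches from `ppt.agents.init_agent()` to `ppt.init_agent()`. A minimal illustration:

```
import pandoraPluginTools as ppt

# Both forms resolve to the same function after the wildcard imports above.
agent = ppt.init_agent()
same_agent = ppt.agents.init_agent()
```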
|
||||
|
|
|
@ -3,15 +3,22 @@ from subprocess import *
|
|||
import hashlib
|
||||
import sys
|
||||
import os
|
||||
from .general import now,set_dict_key_value
|
||||
from .modules import print_module,print_log_module
|
||||
from .transfer import write_xml
|
||||
|
||||
####
|
||||
# Define global variables dict, used in functions as default values.
|
||||
# Its values can be changed.
|
||||
#########################################################################################
|
||||
|
||||
global_variables = {
|
||||
'temporal' : '/tmp',
|
||||
'agents_group_name': '',
|
||||
'interval' : 300
|
||||
'agents_group_name' : '',
|
||||
'interval' : 300
|
||||
}
|
||||
#########################################################################################
|
||||
# OS check
|
||||
|
||||
####
|
||||
# Define some global variables
|
||||
#########################################################################################
|
||||
|
||||
POSIX = os.name == "posix"
|
||||
|
@ -28,9 +35,9 @@ AIX = sys.platform.startswith("aix")
|
|||
|
||||
####
|
||||
# Set a global variable with the specified name and assigns a value to it.
|
||||
###########################################
|
||||
#########################################################################################
|
||||
def set_global_variable(
|
||||
variable_name,
|
||||
variable_name: str = "",
|
||||
value
|
||||
):
|
||||
"""
|
||||
|
@ -40,79 +47,33 @@ def set_global_variable(
|
|||
variable_name (str): Name of the variable to set.
|
||||
value (any): Value to assign to the variable.
|
||||
"""
|
||||
|
||||
global_variables[variable_name] = value
|
||||
set_dict_key_value(global_variables, variable_name, value)
|
||||
|
||||
####
|
||||
# Prints agent XML. Requires agent conf
|
||||
# (dict) and modules (list) as arguments.
|
||||
###########################################
|
||||
def print_agent(
|
||||
agent,
|
||||
modules,
|
||||
temp_dir=global_variables['temporal'],
|
||||
log_modules= None,
|
||||
print_flag = None
|
||||
):
|
||||
"""Prints agent XML. Requires agent conf (dict) and modules (list) as arguments.
|
||||
- Use print_flag to show modules' XML in STDOUT.
|
||||
- Returns a tuple (xml, data_file).
|
||||
# Agent class
|
||||
#########################################################################################
|
||||
|
||||
class Agent:
|
||||
"""
|
||||
data_file=None
|
||||
Basic agent class. Requires agent parameters (config {dictionary})
|
||||
and module definition (modules_def [list of dictionaries])
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
config: dict = None,
|
||||
modules_def: list = []
|
||||
):
|
||||
|
||||
header = "<?xml version='1.0' encoding='UTF-8'?>\n"
|
||||
header += "<agent_data"
|
||||
for dato in agent:
|
||||
header += " " + str(dato) + "='" + str(agent[dato]) + "'"
|
||||
header += ">\n"
|
||||
xml = header
|
||||
if modules :
|
||||
for module in modules:
|
||||
modules_xml = print_module(module)
|
||||
xml += str(modules_xml)
|
||||
if log_modules :
|
||||
for log_module in log_modules:
|
||||
modules_xml = print_log_module(log_module)
|
||||
xml += str(modules_xml)
|
||||
xml += "</agent_data>"
|
||||
if not print_flag:
|
||||
data_file = write_xml(xml, agent["agent_name"], temp_dir)
|
||||
else:
|
||||
print(xml)
|
||||
|
||||
return (xml,data_file)
|
||||
if config is None:
|
||||
config = init_agent()
|
||||
|
||||
####
|
||||
# Creates a agent .data file in the
|
||||
# specified data_dir folder
|
||||
###########################################
|
||||
def write_xml(
|
||||
xml,
|
||||
agent_name,
|
||||
data_dir=global_variables['temporal']
|
||||
):
|
||||
"""Creates a agent .data file in the specified data_dir folder\n
|
||||
Args:
|
||||
- xml (str): XML string to be written in the file.
|
||||
- agent_name (str): agent name for the xml and file name.
|
||||
- data_dir (str): folder in which the file will be created."""
|
||||
|
||||
Utime = datetime.now().strftime('%s')
|
||||
agent_name_md5 = (hashlib.md5(agent_name.encode()).hexdigest())
|
||||
data_file = "%s/%s.%s.data" %(str(data_dir),agent_name_md5,str(Utime))
|
||||
try:
|
||||
with open(data_file, 'x') as data:
|
||||
data.write(xml)
|
||||
except OSError as o:
|
||||
print(f"ERROR - Could not write file: {o}, please check directory permissions", file=sys.stderr)
|
||||
except Exception as e:
|
||||
print(f"{type(e).__name__}: {e}", file=sys.stderr)
|
||||
return (data_file)
|
||||
self.config = config
|
||||
self.modules_def = modules_def
|
||||
|
||||
####
|
||||
# Init agent template
|
||||
###########################################
|
||||
def init_agent() :
|
||||
#########################################################################################
|
||||
def init_agent() -> dict:
|
||||
"""
|
||||
Initializes an agent template with default values.
|
||||
|
||||
|
@ -120,33 +81,58 @@ def init_agent() :
|
|||
dict: Dictionary representing the agent template with default values.
|
||||
"""
|
||||
agent = {
|
||||
"agent_name" : "",
|
||||
"agent_alias" : "",
|
||||
"agent_name" : "",
|
||||
"agent_alias" : "",
|
||||
"parent_agent_name" : "",
|
||||
"description" : "",
|
||||
"version" : "",
|
||||
"os_name" : "",
|
||||
"os_version" : "",
|
||||
"timestamp" : datetime.today().strftime('%Y/%m/%d %H:%M:%S'),
|
||||
"address" : "",
|
||||
"group" : global_variables['agents_group_name'],
|
||||
"interval" : global_variables['interval'],
|
||||
"agent_mode" : "1",
|
||||
}
|
||||
"description" : "",
|
||||
"version" : "",
|
||||
"os_name" : "",
|
||||
"os_version" : "",
|
||||
"timestamp" : now(),
|
||||
"address" : "",
|
||||
"group" : global_variables['agents_group_name'],
|
||||
"interval" : global_variables['interval'],
|
||||
"agent_mode" : "1",
|
||||
}
|
||||
|
||||
return agent
|
||||
|
||||
|
||||
#########################################################################################
|
||||
# Agent class
|
||||
####
|
||||
# Prints agent XML. Requires agent conf (dict) and modules (list) as arguments.
|
||||
#########################################################################################
|
||||
def print_agent(
|
||||
agent: dict = None,
|
||||
modules: list = [],
|
||||
log_modules: list = [],
|
||||
print_flag: bool = False
|
||||
) -> str:
|
||||
"""
|
||||
Prints agent XML. Requires agent conf (dict) and modules (list) as arguments.
|
||||
- Use print_flag to show modules' XML in STDOUT.
|
||||
- Returns xml (str).
|
||||
"""
|
||||
xml = ""
|
||||
data_file = None
|
||||
|
||||
class Agent:
|
||||
"""Basic agent class. Requires agent parameters (config {dictionary})
|
||||
and module definition (modules_def [list of dictionaries]) """
|
||||
def __init__(
|
||||
self,
|
||||
config,
|
||||
modules_def
|
||||
):
|
||||
self.config = config
|
||||
self.modules_def = modules_def
|
||||
if agent is not None:
|
||||
header = "<?xml version='1.0' encoding='UTF-8'?>\n"
|
||||
header += "<agent_data"
|
||||
for dato in agent:
|
||||
header += " " + str(dato) + "='" + str(agent[dato]) + "'"
|
||||
header += ">\n"
|
||||
xml = header
|
||||
|
||||
for module in modules:
|
||||
modules_xml = print_module(module)
|
||||
xml += str(modules_xml)
|
||||
|
||||
for log_module in log_modules:
|
||||
modules_xml = print_log_module(log_module)
|
||||
xml += str(modules_xml)
|
||||
|
||||
xml += "</agent_data>"
|
||||
|
||||
if print_flag:
|
||||
print(xml)
|
||||
|
||||
return xml
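As a usage sketch for the refactored print_agent (the agent and module values below are illustrative, and the package is assumed to be importable as pandoraPluginTools):

```
import pandoraPluginTools as ppt

# Hypothetical agent and module definitions.
agent = ppt.init_agent()
agent.update(agent_name="example-host", agent_alias="example-host")

modules = [{"name": "CPU Load", "type": "generic_data", "value": 0.75}]

# Builds the <agent_data> XML; print_flag=True also echoes it to STDOUT.
xml = ppt.print_agent(agent, modules, print_flag=True)
```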
|
||||
|
|
|
@ -1,194 +0,0 @@
|
|||
import sys
|
||||
import json
|
||||
|
||||
####
|
||||
# Set fixed value to summary key
|
||||
###########################################
|
||||
def set_summary_value(
|
||||
key="",
|
||||
value=""
|
||||
):
|
||||
"""
|
||||
Sets a fixed value for a key in the 'summary' dictionary.
|
||||
|
||||
Args:
|
||||
key (str): Key to set the value for.
|
||||
value (any): Value to assign to the key.
|
||||
"""
|
||||
global summary
|
||||
|
||||
summary[key] = value
|
||||
|
||||
####
|
||||
# Add value to summary key
|
||||
###########################################
|
||||
def add_summary_value(
|
||||
key="",
|
||||
value=""
|
||||
):
|
||||
"""
|
||||
Adds a value to a key in the 'summary' dictionary.
|
||||
|
||||
If the key already exists, the value will be incremented. Otherwise, a new key will be created.
|
||||
|
||||
Args:
|
||||
key (str): Key to add the value to.
|
||||
value (any): Value to add to the key.
|
||||
"""
|
||||
global summary
|
||||
|
||||
if key in summary:
|
||||
summary[key] += value
|
||||
else:
|
||||
set_summary_value(key, value)
|
||||
|
||||
####
|
||||
# Set error level to value
|
||||
###########################################
|
||||
def set_error_level(
|
||||
value=0
|
||||
):
|
||||
"""
|
||||
Sets the error level to the specified value.
|
||||
|
||||
Args:
|
||||
value (int, optional): The error level value. Default is 0.
|
||||
"""
|
||||
global error_level
|
||||
|
||||
error_level = value
|
||||
|
||||
####
|
||||
# Add data to info
|
||||
###########################################
|
||||
def add_info_value(
|
||||
data=""
|
||||
):
|
||||
"""
|
||||
Adds data to the 'info' variable.
|
||||
|
||||
Args:
|
||||
data (str, optional): The data to add to the 'info' variable. Default is an empty string.
|
||||
"""
|
||||
global info
|
||||
|
||||
info += data
|
||||
|
||||
####
|
||||
# Set fixed value to info
|
||||
###########################################
|
||||
def set_info_value(
|
||||
data=""
|
||||
):
|
||||
"""
|
||||
Sets a fixed value to the 'info' variable.
|
||||
|
||||
Args:
|
||||
data (str, optional): The value to set in the 'info' variable. Default is an empty string.
|
||||
"""
|
||||
global info
|
||||
|
||||
info = data
|
||||
|
||||
####
|
||||
# Parse parameters from configuration file
|
||||
###########################################
|
||||
def parse_parameter(
|
||||
config=None,
|
||||
default="",
|
||||
key=""
|
||||
):
|
||||
"""
|
||||
Parses a parameter from the configuration file.
|
||||
|
||||
Args:
|
||||
config (ConfigParser, optional): The ConfigParser object representing the configuration file. Default is None.
|
||||
default (any, optional): The default value to return if the parameter is not found. Default is an empty string.
|
||||
key (str): The key of the parameter to parse.
|
||||
|
||||
Returns:
|
||||
any: The parsed value of the parameter, or the default value if the parameter is not found.
|
||||
"""
|
||||
|
||||
try:
|
||||
return config.get("CONF", key)
|
||||
except Exception as e:
|
||||
return default
|
||||
|
||||
####
|
||||
# Parse configuration file credentials
|
||||
###########################################
|
||||
def parse_conf_entities(
|
||||
entities=""
|
||||
):
|
||||
"""
|
||||
Parses the configuration file credentials.
|
||||
|
||||
Args:
|
||||
entities (str): A JSON string representing the entities.
|
||||
|
||||
Returns:
|
||||
list: A list of entities parsed from the JSON string. If parsing fails, an empty list is returned.
|
||||
"""
|
||||
entities_list = []
|
||||
|
||||
try:
|
||||
parsed_entities = json.loads(entities)
|
||||
if isinstance(parsed_entities, list):
|
||||
entities_list = parsed_entities
|
||||
|
||||
except Exception as e:
|
||||
set_error_level(1)
|
||||
add_info_value("Error while parsing configuration zones or instances: "+str(e)+"\n")
|
||||
|
||||
return entities_list
|
||||
|
||||
|
||||
####
|
||||
# Parse parameter input (int)
|
||||
###########################################
|
||||
def param_int(
|
||||
param=""
|
||||
):
|
||||
"""
|
||||
Parses a parameter as an integer.
|
||||
|
||||
Args:
|
||||
param (any): The parameter to be parsed as an integer.
|
||||
|
||||
Returns:
|
||||
int: The parsed integer value. If parsing fails, returns 0.
|
||||
"""
|
||||
try:
|
||||
return int(param)
|
||||
except:
|
||||
return 0
|
||||
|
||||
####
|
||||
# Print JSON output and exit script
|
||||
###########################################
|
||||
def print_output():
|
||||
"""
|
||||
Prints the JSON output and exits the script.
|
||||
|
||||
The function uses the global variables 'output', 'error_level', 'summary', and 'info'
|
||||
to create the JSON output. It then prints the JSON string and exits the script with
|
||||
the 'error_level' as the exit code.
|
||||
"""
|
||||
|
||||
global output
|
||||
global error_level
|
||||
global summary
|
||||
global info
|
||||
|
||||
output={}
|
||||
if summary:
|
||||
output["summary"] = summary
|
||||
|
||||
if info:
|
||||
output["info"] = info
|
||||
|
||||
json_string = json.dumps(output)
|
||||
|
||||
print(json_string)
|
||||
sys.exit(error_level)
|
|
@ -0,0 +1,130 @@
|
|||
import sys
|
||||
import json
|
||||
|
||||
####
|
||||
# Define some global variables
|
||||
#########################################################################################
|
||||
|
||||
output = {}
|
||||
error_level = 0
|
||||
summary = {}
|
||||
info = ""
|
||||
monitoring_data = []
|
||||
|
||||
####
|
||||
# Set fixed value to summary key
|
||||
#########################################################################################
|
||||
def set_summary_value(
|
||||
key: str = "",
|
||||
value = ""
|
||||
):
|
||||
"""
|
||||
Sets a fixed value for a key in the 'summary' dictionary.
|
||||
|
||||
Args:
|
||||
key (str): Key to set the value for.
|
||||
value (any): Value to assign to the key.
|
||||
"""
|
||||
global summary
|
||||
|
||||
summary[key] = value
|
||||
|
||||
####
|
||||
# Add value to summary key
|
||||
#########################################################################################
|
||||
def add_summary_value(
|
||||
key: str = "",
|
||||
value = ""
|
||||
):
|
||||
"""
|
||||
Adds a value to a key in the 'summary' dictionary.
|
||||
|
||||
If the key already exists, the value will be incremented. Otherwise, a new key will be created.
|
||||
|
||||
Args:
|
||||
key (str): Key to add the value to.
|
||||
value (any): Value to add to the key.
|
||||
"""
|
||||
global summary
|
||||
|
||||
if key in summary:
|
||||
summary[key] += value
|
||||
else:
|
||||
set_summary_value(key, value)
|
||||
|
||||
####
|
||||
# Set error level to value
|
||||
#########################################################################################
|
||||
def set_error_level(
|
||||
value: int = 0
|
||||
):
|
||||
"""
|
||||
Sets the error level to the specified value.
|
||||
|
||||
Args:
|
||||
value (int, optional): The error level value. Default is 0.
|
||||
"""
|
||||
global error_level
|
||||
|
||||
error_level = value
|
||||
|
||||
####
|
||||
# Add data to info
|
||||
#########################################################################################
|
||||
def add_info_value(
|
||||
data: str = ""
|
||||
):
|
||||
"""
|
||||
Adds data to the 'info' variable.
|
||||
|
||||
Args:
|
||||
data (str, optional): The data to add to the 'info' variable. Default is an empty string.
|
||||
"""
|
||||
global info
|
||||
|
||||
info += data
|
||||
|
||||
####
|
||||
# Set fixed value to info
|
||||
#########################################################################################
|
||||
def set_info_value(
|
||||
data: str = ""
|
||||
):
|
||||
"""
|
||||
Sets a fixed value to the 'info' variable.
|
||||
|
||||
Args:
|
||||
data (str, optional): The value to set in the 'info' variable. Default is an empty string.
|
||||
"""
|
||||
global info
|
||||
|
||||
info = data
|
||||
|
||||
####
|
||||
# Print JSON output and exit script
|
||||
#########################################################################################
|
||||
def print_output():
|
||||
"""
|
||||
Prints the JSON output and exits the script.
|
||||
|
||||
The function uses the global variables 'output', 'error_level', 'summary', and 'info'
|
||||
to create the JSON output. It then prints the JSON string and exits the script with
|
||||
the 'error_level' as the exit code.
|
||||
"""
|
||||
|
||||
global output
|
||||
global error_level
|
||||
global summary
|
||||
global info
|
||||
|
||||
output={}
|
||||
if summary:
|
||||
output["summary"] = summary
|
||||
|
||||
if info:
|
||||
output["info"] = info
|
||||
|
||||
json_string = json.dumps(output)
|
||||
|
||||
print(json_string)
|
||||
sys.exit(error_level)
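A sketch of how these discovery helpers might be combined in a plugin; the summary keys and messages are hypothetical:

```
import pandoraPluginTools as ppt

# Accumulate results while the plugin runs, then emit the JSON and exit.
ppt.set_summary_value("total", 0)
ppt.add_summary_value("total", 5)            # increments the existing key
ppt.add_info_value("5 instances discovered\n")
ppt.set_error_level(0)                       # used as the exit code
ppt.print_output()                           # prints {"summary": ..., "info": ...} and exits
```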
|
|
@ -5,47 +5,99 @@ from datetime import datetime
|
|||
import hashlib
|
||||
|
||||
|
||||
#########################################################################################
|
||||
# Debug_dict: prints dictionary in formatted json string.
|
||||
####
|
||||
# Prints dictionary in formatted json string.
|
||||
#########################################################################################
|
||||
|
||||
class debug_dict:
|
||||
def __init__ (
|
||||
self,
|
||||
jsontxt
|
||||
):
|
||||
self.debug_json = json.dumps (jsontxt, indent=4)
|
||||
print (self.debug_json)
|
||||
|
||||
#########################################################################################
|
||||
# Timedate class
|
||||
#########################################################################################
|
||||
|
||||
#class Timedate:
|
||||
def now(
|
||||
print_flag=None,
|
||||
utimestamp=None
|
||||
def debug_dict(
|
||||
jsontxt = ""
|
||||
):
|
||||
"""Returns time in yyyy/mm/dd HH:MM:SS format by default. Use 1 as an argument
|
||||
to get epoch time (utimestamp)"""
|
||||
if utimestamp:
|
||||
time = datetime.timestamp(datetime.today())
|
||||
else:
|
||||
time = datetime.today().strftime('%Y/%m/%d %H:%M:%S')
|
||||
if print_flag:
|
||||
print (time)
|
||||
else:
|
||||
return (time)
|
||||
"""
|
||||
Prints any list, dict, string, float or integer as a formatted JSON string
|
||||
"""
|
||||
try:
|
||||
debug_json = json.dumps(jsontxt, indent=4)
|
||||
print (debug_json)
|
||||
except (TypeError, ValueError) as e:  # json.dumps raises TypeError/ValueError for non-serializable input
|
||||
print(f"debug_dict: Failed to dump. Error: {e}")
|
||||
except Exception as e:
|
||||
print(f"debug_dict: Unexpected error: {e}")
|
||||
|
||||
####
|
||||
# Assign to a key in a dict a given value.
|
||||
#########################################################################################
|
||||
# Translate macro
|
||||
|
||||
def set_dict_key_value(
|
||||
input_dict: dict = {},
|
||||
input_key: str = "",
|
||||
input_value = None
|
||||
):
|
||||
"""
|
||||
Assign to a key in a dict a given value
|
||||
"""
|
||||
key = input_key.strip()
|
||||
|
||||
if len(key) > 0:
|
||||
input_dict[key] = input_value
|
||||
|
||||
####
|
||||
# Return MD5 hash string.
|
||||
#########################################################################################
|
||||
|
||||
def generate_md5(
|
||||
input_string: str = ""
|
||||
) -> str:
|
||||
"""
|
||||
Generates an MD5 hash for the given input string.
|
||||
|
||||
Args:
|
||||
input_string (str): The string for which the MD5 hash will be generated.
|
||||
|
||||
Returns:
|
||||
str: The MD5 hash of the input string as a hexadecimal string.
|
||||
"""
|
||||
try:
|
||||
md5_hash = hashlib.md5(input_string.encode()).hexdigest()
|
||||
except:
|
||||
md5_hash = ""
|
||||
|
||||
return md5_hash
|
||||
|
||||
####
|
||||
# Returns or print current time in date format or utimestamp.
|
||||
#########################################################################################
|
||||
|
||||
def now(
|
||||
print_flag: int = 0,
|
||||
utimestamp: int = 0
|
||||
) -> str:
|
||||
"""
|
||||
Returns time in yyyy/mm/dd HH:MM:SS format by default. Use 1 as an argument
|
||||
to get epoch time (utimestamp)
|
||||
"""
|
||||
today = datetime.today()
|
||||
|
||||
if utimestamp:
|
||||
time = datetime.timestamp(today)
|
||||
else:
|
||||
time = today.strftime('%Y/%m/%d %H:%M:%S')
|
||||
|
||||
if print_flag:
|
||||
print(time)
|
||||
|
||||
return time
|
||||
|
||||
####
|
||||
# Translate macros in string from a dict.
|
||||
#########################################################################################
|
||||
def translate_macros(
|
||||
macro_dic: dict,
|
||||
data: str
|
||||
) -> str:
|
||||
"""Expects a macro dictionary key:value (macro_name:macro_value)
|
||||
and a string to replace macro. \n
|
||||
macro_dic: dict = {},
|
||||
data: str = ""
|
||||
) -> str:
|
||||
"""
|
||||
Expects a macro dictionary key:value (macro_name:macro_value)
|
||||
and a string to replace macro.
|
||||
|
||||
It will replace the macro_name for the macro_value in any string.
|
||||
"""
|
||||
for macro_name, macro_value in macro_dic.items():
|
||||
|
@ -54,14 +106,15 @@ def translate_macros(
|
|||
return data
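A small sketch of translate_macros with a hypothetical macro dictionary, assuming the macros appear literally in the template string:

```
import pandoraPluginTools as ppt

macros = {"_agentname_": "example-host", "_port_": "8080"}
template = "Checking _agentname_ on port _port_"

# Each macro_name found in the string is replaced by its macro_value.
result = ppt.translate_macros(macros, template)
# result -> "Checking example-host on port 8080"
```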
|
||||
|
||||
|
||||
#########################################################################################
|
||||
# Configuration file parser
|
||||
####
|
||||
# Parse configuration file line by line based on separator and return dict.
|
||||
#########################################################################################
|
||||
|
||||
def parse_configuration(
|
||||
file="/etc/pandora/pandora_server.conf",
|
||||
separator=" "
|
||||
):
|
||||
file: str = "/etc/pandora/pandora_server.conf",
|
||||
separator: str = " ",
|
||||
default_values: dict = {}
|
||||
) -> dict:
|
||||
"""
|
||||
Parse configuration. Reads configuration file and stores its data as dict.
|
||||
|
||||
|
@ -73,72 +126,148 @@ def parse_configuration(
|
|||
- dict: containing all keys and values from file.
|
||||
"""
|
||||
config = {}
|
||||
|
||||
try:
|
||||
with open (file, "r") as conf:
|
||||
lines = conf.read().splitlines()
|
||||
for line in lines:
|
||||
if line.startswith("#") or len(line) < 1 :
|
||||
pass
|
||||
if line.strip().startswith("#") or len(line.strip()) < 1 :
|
||||
continue
|
||||
else:
|
||||
option, value = line.strip().split(separator)
|
||||
option, value = line.strip().split(separator, maxsplit=1)
|
||||
config[option.strip()] = value.strip()
|
||||
|
||||
return config
|
||||
except Exception as e:
|
||||
print (f"{type(e).__name__}: {e}")
|
||||
|
||||
for option, value in default_values.items():
|
||||
if option.strip() not in config:
|
||||
config[option.strip()] = value.strip()
|
||||
|
||||
return config
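For instance, parsing a small "option value" file with parse_configuration; the file path and keys below are made up, and default_values fills in any option missing from the file:

```
import pandoraPluginTools as ppt

# Hypothetical file /tmp/example_plugin.conf containing lines such as:
#   interval 300
#   tentacle_ip 192.168.70.100
config = ppt.parse_configuration(
    file="/tmp/example_plugin.conf",
    separator=" ",
    default_values={"tentacle_port": "41121"},
)
interval = ppt.parse_int(config.get("interval"))
```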
|
||||
|
||||
####
|
||||
# Parse csv file line by line and return list.
|
||||
#########################################################################################
|
||||
# csv file parser
|
||||
#########################################################################################
|
||||
|
||||
def parse_csv_file(
|
||||
file, separator=';',
|
||||
count_parameters=None,
|
||||
debug=False
|
||||
file: str = "",
|
||||
separator: str = ';',
|
||||
count_parameters: int = 0,
|
||||
debug: bool = False
|
||||
) -> list:
|
||||
"""
|
||||
Parse csv configuration. Reads configuration file and stores its data in an array.
|
||||
Parse csv configuration. Reads configuration file and stores its data in a list.
|
||||
|
||||
Args:
|
||||
- file (str): configuration csv file path. \n
|
||||
- separator (str, optional): Separator for option and value. Defaults to ";".
|
||||
- count_parameters (int): minimum number of parameters each line should have. Defaults to 0.
|
||||
- debug: print errors on lines
|
||||
- debug (bool): print errors on lines
|
||||
|
||||
Returns:
|
||||
- List: containing a list of values for each csv line.
|
||||
"""
|
||||
csv_arr = []
|
||||
|
||||
try:
|
||||
with open (file, "r") as conf:
|
||||
lines = conf.read().splitlines()
|
||||
with open (file, "r") as csv:
|
||||
lines = csv.read().splitlines()
|
||||
for line in lines:
|
||||
if line.startswith("#") or len(line) < 1 :
|
||||
if line.strip().startswith("#") or len(line.strip()) < 1 :
|
||||
continue
|
||||
else:
|
||||
value = line.strip().split(separator)
|
||||
if count_parameters is None or len(value) >= count_parameters:
|
||||
if len(value) >= count_parameters:
|
||||
csv_arr.append(value)
|
||||
elif debug==True:
|
||||
print(f'Csv line: {line} doesnt match minimun parameter defined: {count_parameters}',file=sys.stderr)
|
||||
print(f'Csv line: {line} does not match minimum parameters defined: {count_parameters}',file=sys.stderr)
|
||||
|
||||
return csv_arr
|
||||
except Exception as e:
|
||||
print (f"{type(e).__name__}: {e}")
|
||||
return 1
|
||||
|
||||
return csv_arr
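And a companion sketch for parse_csv_file with a hypothetical CSV of targets:

```
import pandoraPluginTools as ppt

# Hypothetical file /tmp/targets.csv with lines such as "host1;161;public"
targets = ppt.parse_csv_file(
    file="/tmp/targets.csv",
    separator=";",
    count_parameters=3,   # lines with fewer fields are skipped
    debug=True,           # and reported on STDERR
)
for host, port, community in targets:
    print(host, port, community)
```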
|
||||
|
||||
####
|
||||
# Parse given variable to integer.
|
||||
#########################################################################################
|
||||
# md5 generator
|
||||
#########################################################################################
|
||||
def generate_md5(input_string):
|
||||
|
||||
def parse_int(
|
||||
var=""
|
||||
) -> int:
|
||||
"""
|
||||
Generates an MD5 hash for the given input string.
|
||||
Parse given variable to integer.
|
||||
|
||||
Args:
|
||||
input_string (str): The string for which the MD5 hash will be generated.
|
||||
var (any): The variable to be parsed as an integer.
|
||||
|
||||
Returns:
|
||||
str: The MD5 hash of the input string as a hexadecimal string.
|
||||
int: The parsed integer value. If parsing fails, returns 0.
|
||||
"""
|
||||
md5_hash = hashlib.md5(input_string.encode()).hexdigest()
|
||||
return md5_hash
|
||||
try:
|
||||
return int(var)
|
||||
except:
|
||||
return 0
|
||||
|
||||
####
|
||||
# Parse given variable to float.
|
||||
#########################################################################################
|
||||
|
||||
def parse_float(
|
||||
var=""
|
||||
) -> float:
|
||||
"""
|
||||
Parse given variable to float.
|
||||
|
||||
Args:
|
||||
var (any): The variable to be parsed as a float.
|
||||
|
||||
Returns:
|
||||
float: The parsed float value. If parsing fails, returns 0.
|
||||
"""
|
||||
try:
|
||||
return float(var)
|
||||
except:
|
||||
return 0
|
||||
|
||||
####
|
||||
# Parse given variable to string.
|
||||
#########################################################################################
|
||||
|
||||
def parse_str(
|
||||
var=""
|
||||
) -> str:
|
||||
"""
|
||||
Parse given variable to string.
|
||||
|
||||
Args:
|
||||
var (any): The variable to be parsed as a string.
|
||||
|
||||
Returns:
|
||||
str: The parsed string value. If parsing fails, returns "".
|
||||
"""
|
||||
try:
|
||||
return str(var)
|
||||
except:
|
||||
return ""
|
||||
|
||||
####
|
||||
# Parse given variable to bool.
|
||||
#########################################################################################
|
||||
|
||||
def parse_bool(
|
||||
var=""
|
||||
) -> bool:
|
||||
"""
|
||||
Parse given variable to bool.
|
||||
|
||||
Args:
|
||||
var (any): The variable to be parsed as a bool.
|
||||
|
||||
Returns:
|
||||
bool: The parsed bool value. If parsing fails, returns False.
|
||||
"""
|
||||
try:
|
||||
return bool(var)
|
||||
except:
|
||||
return False
|
|
@ -3,17 +3,18 @@ from requests.auth import HTTPBasicAuth
|
|||
from requests.auth import HTTPDigestAuth
|
||||
from requests.sessions import Session
|
||||
|
||||
#########################################################################################
|
||||
# URL calls
|
||||
####
|
||||
# Auth URL session
|
||||
#########################################################################################
|
||||
|
||||
def auth_call(
|
||||
session,
|
||||
authtype,
|
||||
user,
|
||||
passw
|
||||
session = None,
|
||||
authtype: str = "basic",
|
||||
user: str = "",
|
||||
passw: str = ""
|
||||
):
|
||||
"""Authentication for url request. Requires request.sessions.Session() object.
|
||||
"""
|
||||
Authentication for URL request. Requires a requests.sessions.Session() object.
|
||||
|
||||
Args:
|
||||
- session (object): request Session() object.
|
||||
|
@ -21,27 +22,34 @@ def auth_call(
|
|||
- user (str): auth user.
|
||||
- passw (str): auth password.
|
||||
"""
|
||||
if authtype == 'ntlm':
|
||||
session.auth = HttpNtlmAuth(user, passw)
|
||||
elif authtype == 'basic':
|
||||
session.auth = HTTPBasicAuth(user, passw)
|
||||
elif authtype == 'digest':
|
||||
session.auth = HTTPDigestAuth(user, passw)
|
||||
if session is not None:
|
||||
if authtype == 'ntlm':
|
||||
session.auth = HttpNtlmAuth(user, passw)
|
||||
elif authtype == 'basic':
|
||||
session.auth = HTTPBasicAuth(user, passw)
|
||||
elif authtype == 'digest':
|
||||
session.auth = HTTPDigestAuth(user, passw)
|
||||
|
||||
####
|
||||
# Call URL and return output
|
||||
#########################################################################################
|
||||
|
||||
def call_url(
|
||||
url,
|
||||
authtype,
|
||||
user,
|
||||
passw,
|
||||
time_out
|
||||
):
|
||||
"""Call URL. Uses request module to get url contents.
|
||||
url: str = "",
|
||||
authtype: str = "basic",
|
||||
user: str = "",
|
||||
passw: str = "",
|
||||
timeout: int = 1
|
||||
) -> str:
|
||||
"""
|
||||
Call URL. Uses request module to get url contents.
|
||||
|
||||
Args:
|
||||
- url (str): URL
|
||||
- authtype (str): 'ntlm', 'basic', 'digest'. Optional.
|
||||
- user (str): auth user. Optional.
|
||||
- passw (str): auth password. Optional.
|
||||
- timeout (int): session timeout seconds. Optional.
|
||||
|
||||
Returns:
|
||||
- str: call output
|
||||
|
@ -50,11 +58,14 @@ def call_url(
|
|||
with Session() as session:
|
||||
if authtype != None:
|
||||
auth_call(session, authtype, user, passw)
|
||||
|
||||
output = ""
|
||||
|
||||
try:
|
||||
output = session.get(url, timeout=time_out, verify=False)
|
||||
output = session.get(url, timeout=timeout, verify=False)
|
||||
except ValueError:
|
||||
exit("Error: URL format not valid (example http://myserver/page.php)")
|
||||
output = "Error: URL format not valid (example http://myserver/page.php)"
|
||||
except Exception as e:
|
||||
exit(f"{type(e).__name__}:\t{str(e)}")
|
||||
else:
|
||||
return output
|
||||
output = f"{type(e).__name__}:\t{str(e)}"
|
||||
|
||||
return output
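An illustrative call_url invocation; the URL and credentials are placeholders. On success the returned value is the requests response object, on failure it is an error string:

```
import pandoraPluginTools as ppt

response = ppt.call_url(
    url="http://example.local/api/status",
    authtype="basic",
    user="api_user",
    passw="api_pass",
    timeout=5,
)
if hasattr(response, "text"):
    print(response.text)   # successful HTTP response
else:
    print(response)        # error message string
```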
|
||||
|
|
|
@ -1,151 +1,160 @@
|
|||
####
|
||||
# Returns module in XML format.
|
||||
# Accepts only {dict}
|
||||
###########################################
|
||||
# Returns module in XML format. Accepts only {dict}
|
||||
#########################################################################################
|
||||
def print_module(
|
||||
module,
|
||||
print_flag=None
|
||||
):
|
||||
"""Returns module in XML format. Accepts only {dict}.\n
|
||||
module: dict = None,
|
||||
print_flag: bool = False
|
||||
) -> str:
|
||||
"""
|
||||
Returns module in XML format. Accepts only {dict}.
|
||||
- Only works with one module at a time: otherwise iteration is needed.
|
||||
- Module "value" field accepts str type or [list] for datalists.
|
||||
- Use print_flag to show modules' XML in STDOUT.
|
||||
"""
|
||||
data = dict(module)
|
||||
module_xml = ("<module>\n"
|
||||
"\t<name><![CDATA[" + str(data["name"]) + "]]></name>\n"
|
||||
"\t<type>" + str(data["type"]) + "</type>\n"
|
||||
)
|
||||
|
||||
if type(data["type"]) is not str and "string" not in data["type"]: #### Strip spaces if module not generic_data_string
|
||||
data["value"] = data["value"].strip()
|
||||
if isinstance(data["value"], list): # Checks if value is a list
|
||||
module_xml += "\t<datalist>\n"
|
||||
for value in data["value"]:
|
||||
if type(value) is dict and "value" in value:
|
||||
module_xml += "\t<data>\n"
|
||||
module_xml += "\t\t<value><![CDATA[" + str(value["value"]) + "]]></value>\n"
|
||||
if "timestamp" in value:
|
||||
module_xml += "\t\t<timestamp><![CDATA[" + str(value["timestamp"]) + "]]></timestamp>\n"
|
||||
module_xml += "\t</data>\n"
|
||||
module_xml += "\t</datalist>\n"
|
||||
else:
|
||||
module_xml += "\t<data><![CDATA[" + str(data["value"]) + "]]></data>\n"
|
||||
if "desc" in data:
|
||||
module_xml += "\t<description><![CDATA[" + str(data["desc"]) + "]]></description>\n"
|
||||
if "unit" in data:
|
||||
module_xml += "\t<unit><![CDATA[" + str(data["unit"]) + "]]></unit>\n"
|
||||
if "interval" in data:
|
||||
module_xml += "\t<module_interval><![CDATA[" + str(data["interval"]) + "]]></module_interval>\n"
|
||||
if "tags" in data:
|
||||
module_xml += "\t<tags>" + str(data["tags"]) + "</tags>\n"
|
||||
if "module_group" in data:
|
||||
module_xml += "\t<module_group>" + str(data["module_group"]) + "</module_group>\n"
|
||||
if "module_parent" in data:
|
||||
module_xml += "\t<module_parent>" + str(data["module_parent"]) + "</module_parent>\n"
|
||||
if "min_warning" in data:
|
||||
module_xml += "\t<min_warning><![CDATA[" + str(data["min_warning"]) + "]]></min_warning>\n"
|
||||
if "min_warning_forced" in data:
|
||||
module_xml += "\t<min_warning_forced><![CDATA[" + str(data["min_warning_forced"]) + "]]></min_warning_forced>\n"
|
||||
if "max_warning" in data:
|
||||
module_xml += "\t<max_warning><![CDATA[" + str(data["max_warning"]) + "]]></max_warning>\n"
|
||||
if "max_warning_forced" in data:
|
||||
module_xml += "\t<max_warning_forced><![CDATA[" + str(data["max_warning_forced"]) + "]]></max_warning_forced>\n"
|
||||
if "min_critical" in data:
|
||||
module_xml += "\t<min_critical><![CDATA[" + str(data["min_critical"]) + "]]></min_critical>\n"
|
||||
if "min_critical_forced" in data:
|
||||
module_xml += "\t<min_critical_forced><![CDATA[" + str(data["min_critical_forced"]) + "]]></min_critical_forced>\n"
|
||||
if "max_critical" in data:
|
||||
module_xml += "\t<max_critical><![CDATA[" + str(data["max_critical"]) + "]]></max_critical>\n"
|
||||
if "max_critical_forced" in data:
|
||||
module_xml += "\t<max_critical_forced><![CDATA[" + str(data["max_critical_forced"]) + "]]></max_critical_forced>\n"
|
||||
if "str_warning" in data:
|
||||
module_xml += "\t<str_warning><![CDATA[" + str(data["str_warning"]) + "]]></str_warning>\n"
|
||||
if "str_warning_forced" in data:
|
||||
module_xml += "\t<str_warning_forced><![CDATA[" + str(data["str_warning_forced"]) + "]]></str_warning_forced>\n"
|
||||
if "str_critical" in data:
|
||||
module_xml += "\t<str_critical><![CDATA[" + str(data["str_critical"]) + "]]></str_critical>\n"
|
||||
if "str_critical_forced" in data:
|
||||
module_xml += "\t<str_critical_forced><![CDATA[" + str(data["str_critical_forced"]) + "]]></str_critical_forced>\n"
|
||||
if "critical_inverse" in data:
|
||||
module_xml += "\t<critical_inverse><![CDATA[" + str(data["critical_inverse"]) + "]]></critical_inverse>\n"
|
||||
if "warning_inverse" in data:
|
||||
module_xml += "\t<warning_inverse><![CDATA[" + str(data["warning_inverse"]) + "]]></warning_inverse>\n"
|
||||
if "max" in data:
|
||||
module_xml += "\t<max><![CDATA[" + str(data["max"]) + "]]></max>\n"
|
||||
if "min" in data:
|
||||
module_xml += "\t<min><![CDATA[" + str(data["min"]) + "]]></min>\n"
|
||||
if "post_process" in data:
|
||||
module_xml += "\t<post_process><![CDATA[" + str(data["post_process"]) + "]]></post_process>\n"
|
||||
if "disabled" in data:
|
||||
module_xml += "\t<disabled><![CDATA[" + str(data["disabled"]) + "]]></disabled>\n"
|
||||
if "min_ff_event" in data:
|
||||
module_xml += "\t<min_ff_event><![CDATA[" + str(data["min_ff_event"]) + "]]></min_ff_event>\n"
|
||||
if "status" in data:
|
||||
module_xml += "\t<status><![CDATA[" + str(data["status"]) + "]]></status>\n"
|
||||
if "timestamp" in data:
|
||||
module_xml += "\t<timestamp><![CDATA[" + str(data["timestamp"]) + "]]></timestamp>\n"
|
||||
if "custom_id" in data:
|
||||
module_xml += "\t<custom_id><![CDATA[" + str(data["custom_id"]) + "]]></custom_id>\n"
|
||||
if "critical_instructions" in data:
|
||||
module_xml += "\t<critical_instructions><![CDATA[" + str(data["critical_instructions"]) + "]]></critical_instructions>\n"
|
||||
if "warning_instructions" in data:
|
||||
module_xml += "\t<warning_instructions><![CDATA[" + str(data["warning_instructions"]) + "]]></warning_instructions>\n"
|
||||
if "unknown_instructions" in data:
|
||||
module_xml += "\t<unknown_instructions><![CDATA[" + str(data["unknown_instructions"]) + "]]></unknown_instructions>\n"
|
||||
if "quiet" in data:
|
||||
module_xml += "\t<quiet><![CDATA[" + str(data["quiet"]) + "]]></quiet>\n"
|
||||
if "module_ff_interval" in data:
|
||||
module_xml += "\t<module_ff_interval><![CDATA[" + str(data["module_ff_interval"]) + "]]></module_ff_interval>\n"
|
||||
if "crontab" in data:
|
||||
module_xml += "\t<crontab><![CDATA[" + str(data["crontab"]) + "]]></crontab>\n"
|
||||
if "min_ff_event_normal" in data:
|
||||
module_xml += "\t<min_ff_event_normal><![CDATA[" + str(data["min_ff_event_normal"]) + "]]></min_ff_event_normal>\n"
|
||||
if "min_ff_event_warning" in data:
|
||||
module_xml += "\t<min_ff_event_warning><![CDATA[" + str(data["min_ff_event_warning"]) + "]]></min_ff_event_warning>\n"
|
||||
if "min_ff_event_critical" in data:
|
||||
module_xml += "\t<min_ff_event_critical><![CDATA[" + str(data["min_ff_event_critical"]) + "]]></min_ff_event_critical>\n"
|
||||
if "ff_type" in data:
|
||||
module_xml += "\t<ff_type><![CDATA[" + str(data["ff_type"]) + "]]></ff_type>\n"
|
||||
if "ff_timeout" in data:
|
||||
module_xml += "\t<ff_timeout><![CDATA[" + str(data["ff_timeout"]) + "]]></ff_timeout>\n"
|
||||
if "each_ff" in data:
|
||||
module_xml += "\t<each_ff><![CDATA[" + str(data["each_ff"]) + "]]></each_ff>\n"
|
||||
if "module_parent_unlink" in data:
|
||||
module_xml += "\t<module_parent_unlink><![CDATA[" + str(data["parent_unlink"]) + "]]></module_parent_unlink>\n"
|
||||
if "global_alerts" in data:
|
||||
for alert in data["alert"]:
|
||||
module_xml += "\t<alert_template><![CDATA[" + alert + "]]></alert_template>\n"
|
||||
module_xml += "</module>\n"
|
||||
module_xml = ""
|
||||
|
||||
if module is not None:
|
||||
data = dict(module)
|
||||
module_xml = ("<module>\n"
|
||||
"\t<name><![CDATA[" + str(data["name"]) + "]]></name>\n"
|
||||
"\t<type>" + str(data["type"]) + "</type>\n"
|
||||
)
|
||||
|
||||
if type(data["type"]) is not str and "string" not in data["type"]: #### Strip spaces if module not generic_data_string
|
||||
data["value"] = data["value"].strip()
|
||||
|
||||
if isinstance(data["value"], list): # Checks if value is a list
|
||||
module_xml += "\t<datalist>\n"
|
||||
for value in data["value"]:
|
||||
if type(value) is dict and "value" in value:
|
||||
module_xml += "\t<data>\n"
|
||||
module_xml += "\t\t<value><![CDATA[" + str(value["value"]) + "]]></value>\n"
|
||||
if "timestamp" in value:
|
||||
module_xml += "\t\t<timestamp><![CDATA[" + str(value["timestamp"]) + "]]></timestamp>\n"
|
||||
module_xml += "\t</data>\n"
|
||||
module_xml += "\t</datalist>\n"
|
||||
else:
|
||||
module_xml += "\t<data><![CDATA[" + str(data["value"]) + "]]></data>\n"
|
||||
|
||||
if "desc" in data:
|
||||
module_xml += "\t<description><![CDATA[" + str(data["desc"]) + "]]></description>\n"
|
||||
if "unit" in data:
|
||||
module_xml += "\t<unit><![CDATA[" + str(data["unit"]) + "]]></unit>\n"
|
||||
if "interval" in data:
|
||||
module_xml += "\t<module_interval><![CDATA[" + str(data["interval"]) + "]]></module_interval>\n"
|
||||
if "tags" in data:
|
||||
module_xml += "\t<tags>" + str(data["tags"]) + "</tags>\n"
|
||||
if "module_group" in data:
|
||||
module_xml += "\t<module_group>" + str(data["module_group"]) + "</module_group>\n"
|
||||
if "module_parent" in data:
|
||||
module_xml += "\t<module_parent>" + str(data["module_parent"]) + "</module_parent>\n"
|
||||
if "min_warning" in data:
|
||||
module_xml += "\t<min_warning><![CDATA[" + str(data["min_warning"]) + "]]></min_warning>\n"
|
||||
if "min_warning_forced" in data:
|
||||
module_xml += "\t<min_warning_forced><![CDATA[" + str(data["min_warning_forced"]) + "]]></min_warning_forced>\n"
|
||||
if "max_warning" in data:
|
||||
module_xml += "\t<max_warning><![CDATA[" + str(data["max_warning"]) + "]]></max_warning>\n"
|
||||
if "max_warning_forced" in data:
|
||||
module_xml += "\t<max_warning_forced><![CDATA[" + str(data["max_warning_forced"]) + "]]></max_warning_forced>\n"
|
||||
if "min_critical" in data:
|
||||
module_xml += "\t<min_critical><![CDATA[" + str(data["min_critical"]) + "]]></min_critical>\n"
|
||||
if "min_critical_forced" in data:
|
||||
module_xml += "\t<min_critical_forced><![CDATA[" + str(data["min_critical_forced"]) + "]]></min_critical_forced>\n"
|
||||
if "max_critical" in data:
|
||||
module_xml += "\t<max_critical><![CDATA[" + str(data["max_critical"]) + "]]></max_critical>\n"
|
||||
if "max_critical_forced" in data:
|
||||
module_xml += "\t<max_critical_forced><![CDATA[" + str(data["max_critical_forced"]) + "]]></max_critical_forced>\n"
|
||||
if "str_warning" in data:
|
||||
module_xml += "\t<str_warning><![CDATA[" + str(data["str_warning"]) + "]]></str_warning>\n"
|
||||
if "str_warning_forced" in data:
|
||||
module_xml += "\t<str_warning_forced><![CDATA[" + str(data["str_warning_forced"]) + "]]></str_warning_forced>\n"
|
||||
if "str_critical" in data:
|
||||
module_xml += "\t<str_critical><![CDATA[" + str(data["str_critical"]) + "]]></str_critical>\n"
|
||||
if "str_critical_forced" in data:
|
||||
module_xml += "\t<str_critical_forced><![CDATA[" + str(data["str_critical_forced"]) + "]]></str_critical_forced>\n"
|
||||
if "critical_inverse" in data:
|
||||
module_xml += "\t<critical_inverse><![CDATA[" + str(data["critical_inverse"]) + "]]></critical_inverse>\n"
|
||||
if "warning_inverse" in data:
|
||||
module_xml += "\t<warning_inverse><![CDATA[" + str(data["warning_inverse"]) + "]]></warning_inverse>\n"
|
||||
if "max" in data:
|
||||
module_xml += "\t<max><![CDATA[" + str(data["max"]) + "]]></max>\n"
|
||||
if "min" in data:
|
||||
module_xml += "\t<min><![CDATA[" + str(data["min"]) + "]]></min>\n"
|
||||
if "post_process" in data:
|
||||
module_xml += "\t<post_process><![CDATA[" + str(data["post_process"]) + "]]></post_process>\n"
|
||||
if "disabled" in data:
|
||||
module_xml += "\t<disabled><![CDATA[" + str(data["disabled"]) + "]]></disabled>\n"
|
||||
if "min_ff_event" in data:
|
||||
module_xml += "\t<min_ff_event><![CDATA[" + str(data["min_ff_event"]) + "]]></min_ff_event>\n"
|
||||
if "status" in data:
|
||||
module_xml += "\t<status><![CDATA[" + str(data["status"]) + "]]></status>\n"
|
||||
if "timestamp" in data:
|
||||
module_xml += "\t<timestamp><![CDATA[" + str(data["timestamp"]) + "]]></timestamp>\n"
|
||||
if "custom_id" in data:
|
||||
module_xml += "\t<custom_id><![CDATA[" + str(data["custom_id"]) + "]]></custom_id>\n"
|
||||
if "critical_instructions" in data:
|
||||
module_xml += "\t<critical_instructions><![CDATA[" + str(data["critical_instructions"]) + "]]></critical_instructions>\n"
|
||||
if "warning_instructions" in data:
|
||||
module_xml += "\t<warning_instructions><![CDATA[" + str(data["warning_instructions"]) + "]]></warning_instructions>\n"
|
||||
if "unknown_instructions" in data:
|
||||
module_xml += "\t<unknown_instructions><![CDATA[" + str(data["unknown_instructions"]) + "]]></unknown_instructions>\n"
|
||||
if "quiet" in data:
|
||||
module_xml += "\t<quiet><![CDATA[" + str(data["quiet"]) + "]]></quiet>\n"
|
||||
if "module_ff_interval" in data:
|
||||
module_xml += "\t<module_ff_interval><![CDATA[" + str(data["module_ff_interval"]) + "]]></module_ff_interval>\n"
|
||||
if "crontab" in data:
|
||||
module_xml += "\t<crontab><![CDATA[" + str(data["crontab"]) + "]]></crontab>\n"
|
||||
if "min_ff_event_normal" in data:
|
||||
module_xml += "\t<min_ff_event_normal><![CDATA[" + str(data["min_ff_event_normal"]) + "]]></min_ff_event_normal>\n"
|
||||
if "min_ff_event_warning" in data:
|
||||
module_xml += "\t<min_ff_event_warning><![CDATA[" + str(data["min_ff_event_warning"]) + "]]></min_ff_event_warning>\n"
|
||||
if "min_ff_event_critical" in data:
|
||||
module_xml += "\t<min_ff_event_critical><![CDATA[" + str(data["min_ff_event_critical"]) + "]]></min_ff_event_critical>\n"
|
||||
if "ff_type" in data:
|
||||
module_xml += "\t<ff_type><![CDATA[" + str(data["ff_type"]) + "]]></ff_type>\n"
|
||||
if "ff_timeout" in data:
|
||||
module_xml += "\t<ff_timeout><![CDATA[" + str(data["ff_timeout"]) + "]]></ff_timeout>\n"
|
||||
if "each_ff" in data:
|
||||
module_xml += "\t<each_ff><![CDATA[" + str(data["each_ff"]) + "]]></each_ff>\n"
|
||||
if "module_parent_unlink" in data:
|
||||
module_xml += "\t<module_parent_unlink><![CDATA[" + str(data["parent_unlink"]) + "]]></module_parent_unlink>\n"
|
||||
if "alert" in data:
|
||||
for alert in data["alert"]:
|
||||
module_xml += "\t<alert_template><![CDATA[" + alert + "]]></alert_template>\n"
|
||||
module_xml += "</module>\n"
|
||||
|
||||
if print_flag:
|
||||
print (module_xml)
|
||||
print(module_xml)
|
||||
|
||||
return (module_xml)
|
||||
return module_xml
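For example, a module dict with thresholds and a datalist value might look like this (all values are illustrative):

```
import pandoraPluginTools as ppt

module = {
    "name": "Connections",
    "type": "generic_data",
    "desc": "Active connections",
    "min_warning": 80,
    "min_critical": 95,
    # A list value is rendered as a <datalist> block instead of a single <data> tag.
    "value": [{"value": 75, "timestamp": "2023/01/01 12:00:00"}],
}
xml = ppt.print_module(module, print_flag=True)
```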
|
||||
|
||||
|
||||
#########################################################################################
|
||||
# print_module
|
||||
####
|
||||
# Returns log module in XML format. Accepts only {dict}
|
||||
#########################################################################################
|
||||
|
||||
def print_log_module(
|
||||
module,
|
||||
print_flag = None
|
||||
):
|
||||
"""Returns log module in XML format. Accepts only {dict}.\n
|
||||
module: dict = None,
|
||||
print_flag: bool = False
|
||||
) -> str:
|
||||
"""
|
||||
Returns log module in XML format. Accepts only {dict}.
|
||||
- Only works with one module at a time: otherwise iteration is needed.
|
||||
- Module "value" field accepts str type.
|
||||
- Use print_flag to show the module's XML in STDOUT.
|
||||
"""
|
||||
data = dict(module)
|
||||
module_xml = ("<log_module>\n"
|
||||
"\t<source><![CDATA[" + str(data["source"]) + "]]></source>\n"
|
||||
"\t<data>\"" + str(data["value"]) + "\"</data>\n"
|
||||
)
|
||||
|
||||
module_xml += "</log_module>\n"
|
||||
module_xml = ""
|
||||
|
||||
if module is not None:
|
||||
data = dict(module)
|
||||
module_xml = ("<log_module>\n"
|
||||
"\t<source><![CDATA[" + str(data["source"]) + "]]></source>\n"
|
||||
"\t<data>\"" + str(data["value"]) + "\"</data>\n"
|
||||
)
|
||||
|
||||
module_xml += "</log_module>\n"
|
||||
|
||||
if print_flag:
|
||||
print (module_xml)
|
||||
print(module_xml)
|
||||
|
||||
return (module_xml)
|
||||
return module_xml
|
||||
|
|
|
@ -0,0 +1,87 @@
|
|||
import sys
|
||||
from queue import Queue
|
||||
from threading import Thread
|
||||
|
||||
####
|
||||
# Internal use only: Run a given function in a thread
|
||||
#########################################################################################
|
||||
def _single_thread(
|
||||
q = None,
|
||||
function: callable = None,
|
||||
errors: list = []
|
||||
):
|
||||
"""
|
||||
Internal use only: Run a given function in a thread
|
||||
"""
|
||||
params=q.get()
|
||||
q.task_done()
|
||||
try:
|
||||
function(params)
|
||||
except Exception as e:
|
||||
errors.append("Error while runing single thread: "+str(e))
|
||||
|
||||
####
|
||||
# Run a given function for given items list in a given number of threads
|
||||
#########################################################################################
|
||||
def run_threads(
|
||||
max_threads: int = 1,
|
||||
function: callable = None,
|
||||
items: list = []
|
||||
) -> bool:
|
||||
"""
|
||||
Run a given function for given items list in a given number of threads
|
||||
"""
|
||||
|
||||
# Assign threads
|
||||
threads = max_threads
|
||||
|
||||
if threads > len(items):
|
||||
threads = len(items)
|
||||
|
||||
if threads < 1:
|
||||
threads = 1
|
||||
|
||||
# Distribute items per thread
|
||||
items_per_thread = []
|
||||
thread = 0
|
||||
for item in items:
|
||||
if not 0 <= thread < len(items_per_thread):
|
||||
items_per_thread.append([])
|
||||
|
||||
items_per_thread[thread].append(item)
|
||||
|
||||
thread += 1
|
||||
if thread >= threads:
|
||||
thread=0
|
||||
|
||||
# Run threads
|
||||
try:
|
||||
q=Queue()
|
||||
for n_thread in range(threads) :
|
||||
q.put(items_per_thread[n_thread])
|
||||
|
||||
run_threads = []
|
||||
errors = []
|
||||
|
||||
for n_thread in range(threads):
|
||||
t = Thread(target=_single_thread, args=(q, function, errors))
|
||||
t.daemon=True
|
||||
t.start()
|
||||
run_threads.append(t)
|
||||
|
||||
for t in run_threads:
|
||||
t.join()
|
||||
|
||||
q.join()
|
||||
|
||||
for error in errors:
|
||||
print(error,file=sys.stderr)
|
||||
|
||||
if len(errors) > 0:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print("Error while running threads: "+str(e)+"\n",file=sys.stderr)
|
||||
return False
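A brief sketch of run_threads with a hypothetical worker function; note that each thread receives its whole share of the items list, not a single item:

```
import pandoraPluginTools as ppt

def process_batch(items):
    # Hypothetical worker: runs once per thread with that thread's items.
    for item in items:
        print(f"processing {item}")

hosts = ["host1", "host2", "host3", "host4"]
ok = ppt.run_threads(max_threads=2, function=process_batch, items=hosts)
if not ok:
    print("some threads reported errors")
```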
|
|
@ -4,22 +4,29 @@ import shutil
|
|||
import subprocess
|
||||
import os
|
||||
import sys
|
||||
from .general import generate_md5,set_dict_key_value
|
||||
from .agents import print_agent
|
||||
|
||||
####
|
||||
# Define global variables dict, used in functions as default values.
|
||||
# Its values can be changed.
|
||||
#########################################################################################
|
||||
|
||||
global_variables = {
|
||||
'transfer_mode' : 'tentacle',
|
||||
'temporal' : '/tmp',
|
||||
'data_dir' : '/var/spool/pandora/data_in/',
|
||||
'tentacle_client' : 'tentacle_client',
|
||||
'tentacle_ip' : '127.0.0.1',
|
||||
'tentacle_port' : 41121
|
||||
'transfer_mode' : 'tentacle',
|
||||
'temporal' : '/tmp',
|
||||
'data_dir' : '/var/spool/pandora/data_in/',
|
||||
'tentacle_client' : 'tentacle_client',
|
||||
'tentacle_ip' : '127.0.0.1',
|
||||
'tentacle_port' : 41121,
|
||||
'tentacle_extra_opts' : ''
|
||||
}
|
||||
|
||||
####
|
||||
# Set a global variable with the specified name and assigns a value to it.
|
||||
###########################################
|
||||
#########################################################################################
|
||||
def set_global_variable(
|
||||
variable_name,
|
||||
variable_name: str = "",
|
||||
value
|
||||
):
|
||||
"""
|
||||
|
@ -29,123 +36,131 @@ def set_global_variable(
|
|||
variable_name (str): Name of the variable to set.
|
||||
value (any): Value to assign to the variable.
|
||||
"""
|
||||
|
||||
global_variables[variable_name] = value
|
||||
set_dict_key_value(global_variables, variable_name, value)
|
||||
|
||||
####
|
||||
# Sends file using tentacle protocol
|
||||
###########################################
|
||||
#########################################################################################
|
||||
def tentacle_xml(
|
||||
file,
|
||||
tentacle_ops,
|
||||
tentacle_path='',
|
||||
debug=0
|
||||
):
|
||||
"""Sends file using tentacle protocol\n
|
||||
data_file: str = "",
|
||||
tentacle_ops: dict = {},
|
||||
tentacle_path: str = global_variables['tentacle_client'],
|
||||
debug: int = 0,
|
||||
print_errors: bool = True
|
||||
) -> bool:
|
||||
"""
|
||||
Sends file using tentacle protocol
|
||||
- Only works with one file at time.
|
||||
- data_file variable needs the full file path.
|
||||
- tentacle_ops should be a dict with tentacle options (address [password] [port]).
|
||||
- tentacle_path allows defining a custom path for the tentacle client in case it is not in the system path.
|
||||
- if debug is enabled, the data file will not be removed after being sent.
|
||||
- if print_errors is enabled, the function will print all error messages.
|
||||
|
||||
Returns 0 for OK and 1 for errors.
|
||||
Returns True for OK and False for errors.
|
||||
"""
|
||||
|
||||
if file is None :
|
||||
msg="Tentacle error: file path is required."
|
||||
print(str(datetime.today().strftime('%Y-%m-%d %H:%M')) + msg, file=sys.stderr)
|
||||
else :
|
||||
data_file = file
|
||||
if data_file is not None :
|
||||
|
||||
if tentacle_ops['address'] is None :
|
||||
msg="Tentacle error: No address defined"
|
||||
print(str(datetime.today().strftime('%Y-%m-%d %H:%M')) + msg, file=sys.stderr)
|
||||
return 1
|
||||
if not 'address' in tentacle_ops:
|
||||
tentacle_ops['address'] = global_variables['tentacle_ip']
|
||||
if not 'port' in tentacle_ops:
|
||||
tentacle_ops['port'] = global_variables['tentacle_port']
|
||||
if not 'extra_opts' in tentacle_ops:
|
||||
tentacle_ops['extra_opts'] = global_variables['tentacle_extra_opts']
|
||||
|
||||
if tentacle_ops['address'] is None :
|
||||
if print_errors:
|
||||
sys.stderr.write("Tentacle error: No address defined")
|
||||
return False
|
||||
|
||||
try :
|
||||
with open(data_file.strip(), 'r') as data:
|
||||
data.read()
|
||||
data.close()
|
||||
except Exception as e :
|
||||
if print_errors:
|
||||
sys.stderr.write(f"Tentacle error: {type(e).__name__} {e}")
|
||||
return False
|
||||
|
||||
tentacle_cmd = f"{tentacle_path} -v -a {tentacle_ops['address']} -p {tentacle_ops['port']} {tentacle_ops['extra_opts']} {data_file.strip()}"
|
||||
|
||||
tentacle_exe=Popen(tentacle_cmd, stdout=subprocess.PIPE,stderr=subprocess.PIPE, shell=True)
|
||||
rc=tentacle_exe.wait()
|
||||
|
||||
if debug == 0 :
|
||||
os.remove(data_file.strip())
|
||||
|
||||
if rc != 0 :
|
||||
if print_errors:
|
||||
stderr = tentacle_exe.stderr.read().decode()
|
||||
msg="Tentacle error:" + str(stderr)
|
||||
print(str(datetime.today().strftime('%Y-%m-%d %H:%M')) + msg , file=sys.stderr)
|
||||
return False
|
||||
|
||||
try :
|
||||
with open(data_file, 'r') as data:
|
||||
data.read()
|
||||
data.close()
|
||||
except Exception as e :
|
||||
msg=f"Tentacle error: {type(e).__name__} {e}"
|
||||
print(str(datetime.today().strftime('%Y-%m-%d %H:%M')) + msg , file=sys.stderr)
|
||||
return 1
|
||||
|
||||
tentacle_cmd = f"{tentacle_path}{global_variables['tentacle_client']} -v -a {tentacle_ops['address']} {global_variables['tentacle_opts']}"
|
||||
if "port" in tentacle_ops:
|
||||
tentacle_cmd += f"-p {tentacle_ops['port']} "
|
||||
if "password" in tentacle_ops:
|
||||
tentacle_cmd += f"-x {tentacle_ops['password']} "
|
||||
tentacle_cmd += f"{data_file.strip()} "
|
||||
|
||||
tentacle_exe=Popen(tentacle_cmd, stdout=subprocess.PIPE,stderr=subprocess.PIPE, shell=True)
|
||||
rc=tentacle_exe.wait()
|
||||
|
||||
if rc != 0 :
|
||||
stderr = tentacle_exe.stderr.read().decode()
|
||||
msg="Tentacle error:" + str(stderr)
|
||||
print(str(datetime.today().strftime('%Y-%m-%d %H:%M')) + msg , file=sys.stderr)
|
||||
next
|
||||
return 1
|
||||
elif debug == 0 :
|
||||
os.remove(file)
|
||||
|
||||
return 0
|
||||
else:
|
||||
if print_errors:
|
||||
sys.stderr.write("Tentacle error: file path is required.")
|
||||
return False
|
||||
|
||||
####
|
||||
# Detect transfer mode and execute
|
||||
###########################################
|
||||
def agentplugin(
|
||||
modules,
|
||||
agent,
|
||||
temp_dir=global_variables['temporal'],
|
||||
tentacle=False,
|
||||
tentacle_conf=None
|
||||
):
|
||||
"""
|
||||
Detects the transfer mode and executes the corresponding action.
|
||||
|
||||
Args:
|
||||
modules (list): List of modules.
|
||||
agent (dict): Dictionary with agent configuration.
|
||||
temp_dir (str, optional): Temporary directory. Default is global_variables['temporal'].
|
||||
tentacle (bool, optional): Indicates whether to use the Tentacle protocol. Default is False.
|
||||
tentacle_conf (dict, optional): Dictionary with Tentacle protocol configuration. Default is None.
|
||||
"""
|
||||
agent_file=print_agent(agent,modules,temp_dir)
|
||||
|
||||
if agent_file[1] is not None:
|
||||
if tentacle == True and tentacle_conf is not None:
|
||||
tentacle_xml(agent_file[1],tentacle_conf)
|
||||
else:
|
||||
shutil.move(agent_file[1], global_variables['data_dir'])
|
||||
|
||||
####
|
||||
# Detect transfer mode and execute (call agentplugin())
|
||||
###########################################
|
||||
# Detect transfer mode and send XML.
|
||||
#########################################################################################
|
||||
def transfer_xml(
|
||||
agent,
|
||||
modules,
|
||||
transfer_mode=global_variables['transfer_mode'],
|
||||
tentacle_ip=global_variables['tentacle_ip'],
|
||||
tentacle_port=global_variables['tentacle_port'],
|
||||
temporal=global_variables['temporal']
|
||||
file: str = "",
|
||||
transfer_mode: str = global_variables['transfer_mode'],
|
||||
tentacle_ip: str = global_variables['tentacle_ip'],
|
||||
tentacle_port: int = global_variables['tentacle_port'],
|
||||
tentacle_extra_opts: str = global_variables['tentacle_extra_opts'],
|
||||
data_dir: str = global_variables['data_dir']
|
||||
):
|
||||
|
||||
"""
|
||||
Detects the transfer mode and calls the agentplugin() function to perform the transfer.
|
||||
|
||||
Args:
|
||||
agent (dict): Dictionary with agent configuration.
|
||||
modules (list): List of modules.
|
||||
file (str): Path to file to send.
|
||||
transfer_mode (str, optional): Transfer mode. Default is global_variables['transfer_mode'].
|
||||
tentacle_ip (str, optional): IP address for Tentacle. Default is global_variables['tentacle_ip'].
|
||||
tentacle_port (int, optional): Port for Tentacle. Default is global_variables['tentacle_port'].
|
||||
temporal (str, optional): Temporary directory. Default is global_variables['temporal'].
|
||||
data_dir (str, optional): Path to data dir with local transfer mode. Default is global_variables['data_dir'].
|
||||
"""
|
||||
if file is not None:
|
||||
if transfer_mode != "local":
|
||||
tentacle_conf = {
|
||||
'address' : tentacle_ip,
|
||||
'port' : tentacle_port,
|
||||
'extra_opts' : tentacle_extra_opts
|
||||
}
|
||||
tentacle_xml(file, tentacle_conf)
|
||||
else:
|
||||
shutil.move(file, data_dir)
|
||||
|
||||
####
|
||||
# Creates an agent .data file in the specified data_dir folder
|
||||
#########################################################################################
|
||||
def write_xml(
|
||||
xml: str = "",
|
||||
agent_name: str = "",
|
||||
data_dir: str = global_variables['temporal']
|
||||
) -> str:
|
||||
"""
|
||||
Creates an agent .data file in the specified data_dir folder.
|
||||
Args:
|
||||
- xml (str): XML string to be written in the file.
|
||||
- agent_name (str): agent name for the xml and file name.
|
||||
- data_dir (str): folder in which the file will be created.
|
||||
"""
|
||||
Utime = datetime.now().strftime('%s')
|
||||
agent_name_md5 = generate_md5(agent_name)
|
||||
data_file = "%s/%s.%s.data" %(str(data_dir),agent_name_md5,str(Utime))
|
||||
|
||||
if transfer_mode != "local" and tentacle_ip is not None:
|
||||
tentacle_conf={"address":tentacle_ip,"port":tentacle_port}
|
||||
agentplugin(modules,agent,temporal,True,tentacle_conf)
|
||||
else:
|
||||
agentplugin(modules,agent,temporal)
|
||||
try:
|
||||
with open(data_file, 'x') as data:
|
||||
data.write(xml)
|
||||
except OSError as o:
|
||||
print(f"ERROR - Could not write file: {o}, please check directory permissions", file=sys.stderr)
|
||||
except Exception as e:
|
||||
print(f"{type(e).__name__}: {e}", file=sys.stderr)
|
||||
|
||||
return data_file
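Putting the transfer helpers together, a sketch of the end-to-end flow; agent and module contents, as well as the tentacle address, are hypothetical:

```
import pandoraPluginTools as ppt

agent = ppt.init_agent()
agent.update(agent_name="example-host", agent_alias="example-host")
modules = [{"name": "Uptime", "type": "generic_data", "value": 1}]

xml = ppt.print_agent(agent, modules)
data_file = ppt.write_xml(xml, agent["agent_name"])   # writes the .data file in the temporal dir
ppt.transfer_xml(data_file, transfer_mode="tentacle",
                 tentacle_ip="192.168.70.100")         # or transfer_mode="local"
```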
|
||||
|
|