/* Icinga 2 | (c) 2012 Icinga GmbH | GPLv2+ */

#include "icingadb/icingadb.hpp"
#include "base/configtype.hpp"
#include "base/object-packer.hpp"
#include "base/logger.hpp"
#include "base/serializer.hpp"
#include "base/tlsutility.hpp"
#include "base/initialize.hpp"
#include "base/objectlock.hpp"
#include "base/array.hpp"
#include "base/scriptglobal.hpp"
#include "base/convert.hpp"
#include "base/json.hpp"
#include "base/utility.hpp"
#include "icinga/customvarobject.hpp"
#include "icinga/checkcommand.hpp"
#include "icinga/notificationcommand.hpp"
#include "icinga/eventcommand.hpp"
#include "icinga/host.hpp"
#include <boost/algorithm/string.hpp>
#include <algorithm>
#include <map>
#include <set>
#include <utility>
#include <vector>

using namespace icinga;

String IcingaDB::FormatCheckSumBinary(const String& str)
{
	char output[20*2+1];

	// Render each of the 20 raw SHA-1 bytes as two lowercase hex digits. The cast to
	// unsigned char prevents sign extension for bytes >= 0x80, which would otherwise
	// emit more than two digits, corrupting the checksum and overflowing the buffer.
	for (int i = 0; i < 20; i++)
		sprintf(output + 2 * i, "%02x", (unsigned char)str[i]);

	return output;
}
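/* Example (illustrative, not from the upstream sources): the 20 raw bytes of the SHA-1 digest
 * of an empty string (da 39 a3 ee ... 07 09) are rendered as the 40-character lowercase hex
 * string "da39a3ee5e6b4b0d3255bfef95601890afd80709".
 */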
// Renders a command line (either an array of arguments or a plain string) as a single
// human-readable string: array elements become space-separated single-quoted tokens,
// a plain string is quoted as a whole with embedded single quotes escaped.
String IcingaDB::FormatCommandLine(const Value& commandLine)
{
	String result;
	if (commandLine.IsObjectType<Array>()) {
		Array::Ptr args = commandLine;
		bool first = true;

		ObjectLock olock(args);
		for (const Value& arg : args) {
			String token = "'" + Convert::ToString(arg) + "'";

			if (first)
				first = false;
			else
				result += String(1, ' ');

			result += token;
		}
	} else if (!commandLine.IsEmpty()) {
		result = commandLine;
		boost::algorithm::replace_all(result, "\'", "\\'");
		result = "'" + result + "'";
	}

	return result;
}
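/* Example (illustrative): FormatCommandLine(new Array({ "/usr/lib/nagios/plugins/check_ping", "-H", "127.0.0.1" }))
 * yields "'/usr/lib/nagios/plugins/check_ping' '-H' '127.0.0.1'", while the plain string
 * "echo 'hi'" yields "'echo \'hi\''" (embedded single quotes escaped).
 */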
// SHA-1 over the packed (environment ID, object name) pair, i.e. the object's stable
// identifier in Icinga DB.
String IcingaDB::GetObjectIdentifier(const ConfigObject::Ptr& object)
{
	return HashValue(new Array({m_EnvironmentId, object->GetName()}));
}
/**
 * Calculates a deterministic history event ID like SHA1(env, eventType, x...[, nt][, eventTime]),
 * where SHA1(env, x...) = GetObjectIdentifier(object).
 *
 * Deterministic IDs (instead of random UUIDs) allow both masters to write the same history
 * concurrently (while not in split-brain), so that REPLACE INTO deduplicates events written twice.
 *
 * Per event type this yields:
 *   ack:          SHA1(env, "ack_set"|"ack_clear", checkable.name, setTime)
 *   comment:      SHA1(env, "comment_add"|"comment_remove", comment.name)
 *   downtime:     SHA1(env, "downtime_start"|"downtime_end", downtime.name)
 *   flapping:     SHA1(env, "flapping_start"|"flapping_end", checkable.name, startTime)
 *   notification: SHA1(env, "notification", notification.name, notificationType, sendTime)
 *   state:        SHA1(env, "state_change", checkable.name, changeTime)
 */
String IcingaDB::CalcEventID(const char* eventType, const ConfigObject::Ptr& object, double eventTime, NotificationType nt)
{
	Array::Ptr rawId = new Array({object->GetName()});
	rawId->Insert(0, m_EnvironmentId);
	rawId->Insert(1, eventType);

	if (nt) {
		rawId->Add(GetNotificationTypeByEnum(nt));
	}

	if (eventTime) {
		rawId->Add(TimestampToMilliseconds(eventTime));
	}

	return HashValue(std::move(rawId));
}
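/* Example (illustrative): for a host/service state change the caller would use something like
 * CalcEventID("state_change", checkable, changeTime), which hashes
 * [m_EnvironmentId, "state_change", checkable->GetName(), TimestampToMilliseconds(changeTime)],
 * so both masters compute the identical event ID for the identical state change.
 */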
static const std::set<String> metadataWhitelist ({"package", "source_location", "templates"});

/**
 * Prepare custom vars for being written to Redis
 *
 * object.vars = {
 *   "disks": {
 *     "disk": {},
 *     "disk /": {
 *       "disk_partitions": "/"
 *     }
 *   }
 * }
 *
 * return {
 *   SHA1(PackObject([
 *     EnvironmentId,
 *     "disks",
 *     {
 *       "disk": {},
 *       "disk /": {
 *         "disk_partitions": "/"
 *       }
 *     }
 *   ])): {
 *     "environment_id": EnvironmentId,
 *     "name_checksum": SHA1("disks"),
 *     "name": "disks",
 *     "value": {
 *       "disk": {},
 *       "disk /": {
 *         "disk_partitions": "/"
 *       }
 *     }
 *   }
 * }
 *
 * @param vars Custom variables of a config object
 *
 * @return Dictionary with one Redis-ready entry per custom var, keyed by checksum
 */
Dictionary::Ptr IcingaDB::SerializeVars(const Dictionary::Ptr& vars)
{
	if (!vars)
		return nullptr;

	Dictionary::Ptr res = new Dictionary();

	ObjectLock olock(vars);

	for (auto& kv : vars) {
		res->Set(
			SHA1(PackObject((Array::Ptr)new Array({m_EnvironmentId, kv.first, kv.second}))),
			(Dictionary::Ptr)new Dictionary({
				{"environment_id", m_EnvironmentId},
				{"name_checksum", SHA1(kv.first)},
				{"name", kv.first},
				{"value", JsonEncode(kv.second)},
			})
		);
	}

	return res;
}
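/* Example (illustrative): for vars = { "os" = "Linux" } the result contains a single entry:
 *
 *   SHA1(PackObject([ m_EnvironmentId, "os", "Linux" ])) => {
 *     "environment_id": m_EnvironmentId,
 *     "name_checksum": SHA1("os"),
 *     "name": "os",
 *     "value": "\"Linux\""   // JsonEncode() keeps nested arrays/dictionaries intact as JSON
 *   }
 */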
const char* IcingaDB::GetNotificationTypeByEnum(NotificationType type)
{
	switch (type) {
		case NotificationDowntimeStart:
			return "downtime_start";
		case NotificationDowntimeEnd:
			return "downtime_end";
		case NotificationDowntimeRemoved:
			return "downtime_removed";
		case NotificationCustom:
			return "custom";
		case NotificationAcknowledgement:
			return "acknowledgement";
		case NotificationProblem:
			return "problem";
		case NotificationRecovery:
			return "recovery";
		case NotificationFlappingStart:
			return "flapping_start";
		case NotificationFlappingEnd:
			return "flapping_end";
	}

	VERIFY(!"Invalid notification type.");
}
static const std::set<String> propertiesBlacklistEmpty;

String IcingaDB::HashValue(const Value& value)
{
	return HashValue(value, propertiesBlacklistEmpty);
}

String IcingaDB::HashValue(const Value& value, const std::set<String>& propertiesBlacklist, bool propertiesWhitelist)
{
	Value temp;
	bool mutabl;

	Type::Ptr type = value.GetReflectionType();

	if (ConfigObject::TypeInstance->IsAssignableFrom(type)) {
		temp = Serialize(value, FAConfig);
		mutabl = true;
	} else {
		temp = value;
		mutabl = false;
	}

	if (propertiesBlacklist.size() && temp.IsObject()) {
		Dictionary::Ptr dict = dynamic_pointer_cast<Dictionary>((Object::Ptr)temp);

		if (dict) {
			if (!mutabl)
				dict = dict->ShallowClone();

			ObjectLock olock(dict);

			if (propertiesWhitelist) {
				// Treat propertiesBlacklist as a whitelist: drop every key that is NOT in the set.
				auto current = dict->Begin();
				auto propertiesBlacklistEnd = propertiesBlacklist.end();

				while (current != dict->End()) {
					if (propertiesBlacklist.find(current->first) == propertiesBlacklistEnd) {
						dict->Remove(current++);
					} else {
						++current;
					}
				}
			} else {
				// Plain blacklist: drop exactly the listed keys.
				for (auto& property : propertiesBlacklist)
					dict->Remove(property);
			}

			if (!mutabl)
				temp = dict;
		}
	}

	return SHA1(PackObject(temp));
}
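/* Usage sketch (illustrative): HashValue(host) hashes the serialized config attributes as-is,
 * HashValue(host, {"groups"}) hashes them without the "groups" key, and passing
 * propertiesWhitelist = true inverts the set, e.g. HashValue(host, metadataWhitelist, true)
 * hashes only "package", "source_location" and "templates".
 */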
String IcingaDB::GetLowerCaseTypeNameDB(const ConfigObject::Ptr& obj)
{
	return obj->GetReflectionType()->GetName().ToLower();
}

long long IcingaDB::TimestampToMilliseconds(double timestamp) {
	return static_cast<long long>(timestamp * 1000);
}
// Flattens an Icinga DSL value into the string representation used for Redis stream fields.
String IcingaDB::IcingaToStreamValue(const Value& value)
{
	switch (value.GetType()) {
		case ValueBoolean:
			return Convert::ToString(int(value));
		case ValueString:
			return Utility::ValidateUTF8(value);
		case ValueNumber:
		case ValueEmpty:
			return Convert::ToString(value);
		default:
			return JsonEncode(value);
	}
}
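/* Example (illustrative): booleans are flattened to "1"/"0", strings are passed through UTF-8
 * validation unchanged, numbers and empty values use their plain string form, and anything
 * else (arrays, dictionaries) is JSON-encoded into a single stream field value.
 */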
// Returns the items that exist in "arrayOld" but not in "arrayNew"
std::vector<Value> IcingaDB::GetArrayDeletedValues(const Array::Ptr& arrayOld, const Array::Ptr& arrayNew) {
	std::vector<Value> deletedValues;

	if (!arrayOld) {
		return deletedValues;
	}

	if (!arrayNew) {
		ObjectLock olock (arrayOld);
		return std::vector<Value>(arrayOld->Begin(), arrayOld->End());
	}

	std::vector<Value> vectorOld;
	{
		ObjectLock olock (arrayOld);
		vectorOld.assign(arrayOld->Begin(), arrayOld->End());
	}
	std::sort(vectorOld.begin(), vectorOld.end());
	vectorOld.erase(std::unique(vectorOld.begin(), vectorOld.end()), vectorOld.end());

	std::vector<Value> vectorNew;
	{
		ObjectLock olock (arrayNew);
		vectorNew.assign(arrayNew->Begin(), arrayNew->End());
	}
	std::sort(vectorNew.begin(), vectorNew.end());
	vectorNew.erase(std::unique(vectorNew.begin(), vectorNew.end()), vectorNew.end());

	std::set_difference(vectorOld.begin(), vectorOld.end(), vectorNew.begin(), vectorNew.end(), std::back_inserter(deletedValues));

	return deletedValues;
}
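/* Example (illustrative): with arrayOld = [ "linux", "linux", "web", "db" ] and
 * arrayNew = [ "web", "cache" ], both sides are sorted and de-duplicated first,
 * so the result is { "db", "linux" }.
 */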
// Returns the keys that exist in "dictOld" but not in "dictNew"
std::vector<String> IcingaDB::GetDictionaryDeletedKeys(const Dictionary::Ptr& dictOld, const Dictionary::Ptr& dictNew) {
	std::vector<String> deletedKeys;

	if (!dictOld) {
		return deletedKeys;
	}

	std::vector<String> oldKeys = dictOld->GetKeys();

	if (!dictNew) {
		return oldKeys;
	}

	std::vector<String> newKeys = dictNew->GetKeys();

	std::set_difference(oldKeys.begin(), oldKeys.end(), newKeys.begin(), newKeys.end(), std::back_inserter(deletedKeys));

	return deletedKeys;
}
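/* Example (illustrative): with dictOld = { "notes" = "text", "owner" = "icinga" } and
 * dictNew = { "owner" = "icinga" }, the result is { "notes" }. GetKeys() returns the keys
 * in sorted order, which is what std::set_difference() expects here.
 */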