Skip to content

Commit

Permalink
feat: add basic auth and authorization for prometheus scrape config (a…
Browse files Browse the repository at this point in the history
  • Loading branch information
catdogpandas authored Aug 23, 2024
1 parent 51b19d7 commit f8cec6e
Show file tree
Hide file tree
Showing 6 changed files with 322 additions and 58 deletions.
9 changes: 9 additions & 0 deletions core/prometheus/Constants.h
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,18 @@ const char* const SERIES_LIMIT = "series_limit";
// Scrape-config YAML keys for size/relabel settings.
const char* const MAX_SCRAPE_SIZE = "max_scrape_size";
const char* const METRIC_RELABEL_CONFIGS = "metric_relabel_configs";
// `authorization` section key (credentials-based auth) of a scrape_config.
const char* const AUTHORIZATION = "authorization";
// Default auth scheme used when `authorization.type` is not set.
// NOTE(review): identifier misspells "TYPE" ("TYEP"); renaming would touch
// every user of this constant, so it is only flagged here.
const char* const AUTHORIZATION_DEFAULT_TYEP = "Bearer";
// HTTP request header name "Authorization"; the leading underscore-split
// spelling distinguishes it from the lowercase config-key constant above.
const char* const A_UTHORIZATION = "Authorization";
// Sub-keys of the `authorization` section.
const char* const TYPE = "type";
const char* const CREDENTIALS = "credentials";
const char* const CREDENTIALS_FILE = "credentials_file";
// `basic_auth` section key and its sub-keys.
const char* const BASIC_AUTH = "basic_auth";
const char* const USERNAME = "username";
const char* const USERNAME_FILE = "username_file";
const char* const PASSWORD = "password";
const char* const PASSWORD_FILE = "password_file";
// Prefix of the Basic auth header value: "Basic <base64(user:password)>".
const char* const BASIC_PREFIX = "Basic ";


// metric labels
const char* const JOB = "job";
Expand Down
131 changes: 108 additions & 23 deletions core/prometheus/schedulers/ScrapeConfig.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,16 @@
#include "logger/Logger.h"
#include "prometheus/Constants.h"
#include "prometheus/Utils.h"
#include "sdk/Common.h"

using namespace std;

namespace logtail {
ScrapeConfig::ScrapeConfig()
: mScheme("http"),
mMetricsPath("/metrics"),
mScrapeIntervalSeconds(60),
: mScrapeIntervalSeconds(60),
mScrapeTimeoutSeconds(10),
mMetricsPath("/metrics"),
mScheme("http"),
mMaxScrapeSizeBytes(-1),
mSampleLimit(-1),
mSeriesLimit(-1) {
Expand All @@ -33,12 +34,7 @@ bool ScrapeConfig::Init(const Json::Value& scrapeConfig) {
} else {
return false;
}
if (scrapeConfig.isMember(prometheus::SCHEME) && scrapeConfig[prometheus::SCHEME].isString()) {
mScheme = scrapeConfig[prometheus::SCHEME].asString();
}
if (scrapeConfig.isMember(prometheus::METRICS_PATH) && scrapeConfig[prometheus::METRICS_PATH].isString()) {
mMetricsPath = scrapeConfig[prometheus::METRICS_PATH].asString();
}

if (scrapeConfig.isMember(prometheus::SCRAPE_INTERVAL) && scrapeConfig[prometheus::SCRAPE_INTERVAL].isString()) {
string tmpScrapeIntervalString = scrapeConfig[prometheus::SCRAPE_INTERVAL].asString();
mScrapeIntervalSeconds = DurationToSecond(tmpScrapeIntervalString);
Expand All @@ -47,6 +43,32 @@ bool ScrapeConfig::Init(const Json::Value& scrapeConfig) {
string tmpScrapeTimeoutString = scrapeConfig[prometheus::SCRAPE_TIMEOUT].asString();
mScrapeTimeoutSeconds = DurationToSecond(tmpScrapeTimeoutString);
}
if (scrapeConfig.isMember(prometheus::METRICS_PATH) && scrapeConfig[prometheus::METRICS_PATH].isString()) {
mMetricsPath = scrapeConfig[prometheus::METRICS_PATH].asString();
}
if (scrapeConfig.isMember(prometheus::SCHEME) && scrapeConfig[prometheus::SCHEME].isString()) {
mScheme = scrapeConfig[prometheus::SCHEME].asString();
}

// basic auth, authorization, oauth2
// basic auth, authorization, oauth2 cannot be used at the same time
if ((int)scrapeConfig.isMember(prometheus::BASIC_AUTH) + scrapeConfig.isMember(prometheus::AUTHORIZATION) > 1) {
LOG_ERROR(sLogger, ("basic auth and authorization cannot be used at the same time", ""));
return false;
}
if (scrapeConfig.isMember(prometheus::BASIC_AUTH) && scrapeConfig[prometheus::BASIC_AUTH].isObject()) {
if (!InitBasicAuth(scrapeConfig[prometheus::BASIC_AUTH])) {
LOG_ERROR(sLogger, ("basic auth config error", ""));
return false;
}
}
if (scrapeConfig.isMember(prometheus::AUTHORIZATION) && scrapeConfig[prometheus::AUTHORIZATION].isObject()) {
if (!InitAuthorization(scrapeConfig[prometheus::AUTHORIZATION])) {
LOG_ERROR(sLogger, ("authorization config error", ""));
return false;
}
}

// <size>: a size in bytes, e.g. 512MB. A unit is required. Supported units: B, KB, MB, GB, TB, PB, EB.
if (scrapeConfig.isMember(prometheus::MAX_SCRAPE_SIZE) && scrapeConfig[prometheus::MAX_SCRAPE_SIZE].isString()) {
string tmpMaxScrapeSize = scrapeConfig[prometheus::MAX_SCRAPE_SIZE].asString();
Expand Down Expand Up @@ -104,20 +126,6 @@ bool ScrapeConfig::Init(const Json::Value& scrapeConfig) {
}
}

if (scrapeConfig.isMember(prometheus::AUTHORIZATION) && scrapeConfig[prometheus::AUTHORIZATION].isObject()) {
string type = scrapeConfig[prometheus::AUTHORIZATION][prometheus::TYPE].asString();
string bearerToken;
bool b
= ReadFile(scrapeConfig[prometheus::AUTHORIZATION][prometheus::CREDENTIALS_FILE].asString(), bearerToken);
if (!b) {
LOG_ERROR(sLogger,
("read credentials_file failed, credentials_file",
scrapeConfig[prometheus::AUTHORIZATION][prometheus::CREDENTIALS_FILE].asString()));
return false;
}
mHeaders[prometheus::A_UTHORIZATION] = type + " " + bearerToken;
}

for (const auto& relabelConfig : scrapeConfig[prometheus::RELABEL_CONFIGS]) {
mRelabelConfigs.emplace_back(relabelConfig);
}
Expand All @@ -138,4 +146,81 @@ bool ScrapeConfig::Init(const Json::Value& scrapeConfig) {

return true;
}

// Parses the `basic_auth` section of a scrape config and caches the resulting
// HTTP header ("Authorization: Basic <base64(user:password)>") in mAuthHeaders.
// Exactly one of username/username_file and one of password/password_file must
// be provided (mirroring the Prometheus config contract).
// Returns false after logging on any config or file-read error.
bool ScrapeConfig::InitBasicAuth(const Json::Value& basicAuth) {
    // Fetch an optional string field; absent or non-string values yield "".
    auto fetchString = [&basicAuth](const char* key) -> string {
        if (basicAuth.isMember(key) && basicAuth[key].isString()) {
            return basicAuth[key].asString();
        }
        return string();
    };
    string username = fetchString(prometheus::USERNAME);
    string usernameFile = fetchString(prometheus::USERNAME_FILE);
    string password = fetchString(prometheus::PASSWORD);
    string passwordFile = fetchString(prometheus::PASSWORD_FILE);

    // Each credential half must be supplied in some form.
    if ((username.empty() && usernameFile.empty()) || (password.empty() && passwordFile.empty())) {
        LOG_ERROR(sLogger, ("basic auth username or password is empty", ""));
        return false;
    }
    // The inline value and the file variant are mutually exclusive.
    if ((!username.empty() && !usernameFile.empty()) || (!password.empty() && !passwordFile.empty())) {
        LOG_ERROR(sLogger, ("basic auth config error", ""));
        return false;
    }

    // Resolve file-based credentials into the in-memory strings.
    if (!usernameFile.empty() && !ReadFile(usernameFile, username)) {
        LOG_ERROR(sLogger, ("read username_file failed, username_file", usernameFile));
        return false;
    }
    if (!passwordFile.empty() && !ReadFile(passwordFile, password)) {
        LOG_ERROR(sLogger, ("read password_file failed, password_file", passwordFile));
        return false;
    }

    // Basic scheme: base64-encode "user:password" and prefix with "Basic ".
    auto encoded = sdk::Base64Enconde(username + ":" + password);
    mAuthHeaders[prometheus::A_UTHORIZATION] = prometheus::BASIC_PREFIX + encoded;
    return true;
}

// Parses the `authorization` section of a scrape config and caches the
// resulting HTTP header ("Authorization: <type> <credentials>") in
// mAuthHeaders. `type` defaults to "Bearer" when unset; `credentials` and
// `credentials_file` are mutually exclusive.
// Returns false after logging on any config or file-read error.
bool ScrapeConfig::InitAuthorization(const Json::Value& authorization) {
    // Fetch an optional string field; absent or non-string values yield "".
    auto fetchString = [&authorization](const char* key) -> string {
        if (authorization.isMember(key) && authorization[key].isString()) {
            return authorization[key].asString();
        }
        return string();
    };

    string type = fetchString(prometheus::TYPE);
    if (type.empty()) {
        // Prometheus defaults the auth scheme to "Bearer" when no type is set.
        type = prometheus::AUTHORIZATION_DEFAULT_TYEP;
    }

    string credentials = fetchString(prometheus::CREDENTIALS);
    string credentialsFile = fetchString(prometheus::CREDENTIALS_FILE);

    // An inline secret and a secret file cannot both be supplied.
    if (!credentials.empty() && !credentialsFile.empty()) {
        LOG_ERROR(sLogger, ("authorization config error", ""));
        return false;
    }
    if (!credentialsFile.empty() && !ReadFile(credentialsFile, credentials)) {
        LOG_ERROR(sLogger, ("authorization read file error", ""));
        return false;
    }

    mAuthHeaders[prometheus::A_UTHORIZATION] = type + " " + credentials;
    return true;
}

} // namespace logtail
14 changes: 11 additions & 3 deletions core/prometheus/schedulers/ScrapeConfig.h
Original file line number Diff line number Diff line change
Expand Up @@ -11,28 +11,36 @@


namespace logtail {

// Holds one parsed Prometheus scrape_config entry (job name, endpoint shape,
// limits, auth headers, relabeling) used to drive target scraping.
// NOTE(review): this view interleaves pre- and post-change lines of a diff, so
// some members appear twice (mScheme/mMetricsPath) and both mHeaders (old) and
// mAuthHeaders (new) are visible; the committed header keeps only one of each.
class ScrapeConfig {
public:
    // `job_name`; Init() fails if this key is missing from the config.
    std::string mJobName;
    std::string mScheme;
    std::string mMetricsPath;
    // Parsed from the `scrape_interval` / `scrape_timeout` duration strings.
    int64_t mScrapeIntervalSeconds;
    int64_t mScrapeTimeoutSeconds;
    // HTTP request path and scheme ("http" by default) for scraping targets.
    std::string mMetricsPath;
    std::string mScheme;

    // Pre-built auth headers (e.g. "Authorization" -> "Basic <b64>" or
    // "<type> <credentials>") attached to every scrape request.
    std::map<std::string, std::string> mAuthHeaders;

    // Limits parsed from the config; -1 presumably means "no limit" — TODO confirm.
    int64_t mMaxScrapeSizeBytes;
    int64_t mSampleLimit;
    int64_t mSeriesLimit;
    std::vector<RelabelConfig> mRelabelConfigs;

    // URL query parameters (`params` section) keyed by parameter name.
    std::map<std::string, std::vector<std::string>> mParams;
    std::map<std::string, std::string> mHeaders;

    // Pre-rendered query string derived from mParams.
    std::string mQueryString;

    ScrapeConfig();
    bool Init(const Json::Value& config);

private:
    // Helpers for the mutually exclusive `basic_auth` / `authorization` blocks.
    bool InitBasicAuth(const Json::Value& basicAuth);
    bool InitAuthorization(const Json::Value& authorization);

#ifdef APSARA_UNIT_TEST_MAIN
    friend class ScrapeConfigUnittest;
#endif
};

} // namespace logtail
4 changes: 2 additions & 2 deletions core/prometheus/schedulers/ScrapeScheduler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ void ScrapeScheduler::OnMetricResult(const HttpResponse& response, uint64_t time
if (response.mStatusCode != 200) {
mScrapeResponseSizeBytes = 0;
string headerStr;
for (const auto& [k, v] : mScrapeConfigPtr->mHeaders) {
for (const auto& [k, v] : mScrapeConfigPtr->mAuthHeaders) {
headerStr.append(k).append(":").append(v).append(";");
}
LOG_WARNING(sLogger,
Expand Down Expand Up @@ -159,7 +159,7 @@ std::unique_ptr<TimerEvent> ScrapeScheduler::BuildScrapeTimerEvent(std::chrono::
mPort,
mScrapeConfigPtr->mMetricsPath,
mScrapeConfigPtr->mQueryString,
mScrapeConfigPtr->mHeaders,
mScrapeConfigPtr->mAuthHeaders,
"",
mScrapeConfigPtr->mScrapeTimeoutSeconds,
mScrapeConfigPtr->mScrapeIntervalSeconds
Expand Down
Loading

0 comments on commit f8cec6e

Please sign in to comment.