[Reporting] Add context to logging about Space ID handling (elastic#80106)

* [Reporting] Logger Fixes

* info level to debug level for default space message

* fix context for custom logo

* fix tags

* Update x-pack/plugins/reporting/server/core.ts

Co-authored-by: Joel Griffith <[email protected]>

* rm whitespace

* one more backtick string

Co-authored-by: Kibana Machine <[email protected]>
Co-authored-by: Joel Griffith <[email protected]>
# Conflicts:
#	x-pack/plugins/reporting/server/export_types/printable_pdf/create_job/index.ts
tsullivan committed Oct 14, 2020
1 parent ea70c68 commit f01f9cc
Showing 9 changed files with 51 additions and 32 deletions.
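All nine files apply one pattern: a job factory clones the parent logger with job-type and phase tags, then threads that tagged logger into the ReportingCore space helpers (getSpaceId, getFakeRequest, getUiSettingsClient) so Space ID messages carry job context. A rough TypeScript sketch of the idea follows; LevelLogger, ReportingCore, and the 'csv' job-type value are simplified stand-ins for illustration, not the plugin's real interfaces.

// Hedged sketch: simplified stand-in types, not the Kibana plugin's actual code.
interface LevelLogger {
  clone(tags: string[]): LevelLogger;
  info(message: string): void;
  debug(message: string): void;
}

interface ReportingCore {
  // After this commit, the space helper accepts the caller's tagged logger.
  getSpaceId(request: unknown, logger: LevelLogger): string | undefined;
}

const CSV_JOB_TYPE = 'csv'; // placeholder value for illustration

function createJobFnFactory(reporting: ReportingCore, parentLogger: LevelLogger) {
  // Tag the logger once per factory: [job type, phase].
  const logger = parentLogger.clone([CSV_JOB_TYPE, 'create-job']);

  return function createJob(request: unknown, jobParams: object) {
    return {
      // Logs "Request uses Space ID: <id>" at info level for a custom space and
      // "Request uses default Space" at debug level for the default space.
      spaceId: reporting.getSpaceId(request, logger),
      ...jobParams,
    };
  };
}

The diffs below apply this change to the CSV, PNG, and PDF export types, and to the getCustomLogo helper and its test.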
16 changes: 8 additions & 8 deletions x-pack/plugins/reporting/server/core.ts
@@ -195,21 +195,21 @@ export class ReportingCore {
return scopedUiSettingsService;
}

- public getSpaceId(request: KibanaRequest): string | undefined {
+ public getSpaceId(request: KibanaRequest, logger = this.logger): string | undefined {
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
if (spacesService) {
const spaceId = spacesService?.getSpaceId(request);

if (spaceId !== DEFAULT_SPACE_ID) {
- this.logger.info(`Request uses Space ID: ` + spaceId);
+ logger.info(`Request uses Space ID: ${spaceId}`);
return spaceId;
} else {
- this.logger.info(`Request uses default Space`);
+ logger.debug(`Request uses default Space`);
}
}
}

- public getFakeRequest(baseRequest: object, spaceId?: string) {
+ public getFakeRequest(baseRequest: object, spaceId: string | undefined, logger = this.logger) {
const fakeRequest = KibanaRequest.from({
path: '/',
route: { settings: {} },
@@ -221,19 +221,19 @@ export class ReportingCore {
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
if (spacesService) {
if (spaceId && spaceId !== DEFAULT_SPACE_ID) {
- this.logger.info(`Generating request for space: ` + spaceId);
+ logger.info(`Generating request for space: ${spaceId}`);
this.getPluginSetupDeps().basePath.set(fakeRequest, `/s/${spaceId}`);
}
}

return fakeRequest;
}

- public async getUiSettingsClient(request: KibanaRequest) {
+ public async getUiSettingsClient(request: KibanaRequest, logger = this.logger) {
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
- const spaceId = this.getSpaceId(request);
+ const spaceId = this.getSpaceId(request, logger);
if (spacesService && spaceId) {
- this.logger.info(`Creating UI Settings Client for space: ${spaceId}`);
+ logger.info(`Creating UI Settings Client for space: ${spaceId}`);
}
const savedObjectsClient = await this.getSavedObjectsClient(request);
return await this.getUiSettingsServiceFactory(savedObjectsClient);
@@ -4,14 +4,17 @@
* you may not use this file except in compliance with the Elastic License.
*/

+ import { CSV_JOB_TYPE } from '../../../constants';
import { cryptoFactory } from '../../lib';
import { CreateJobFn, CreateJobFnFactory } from '../../types';
import { IndexPatternSavedObject, JobParamsCSV, TaskPayloadCSV } from './types';

export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<
JobParamsCSV,
TaskPayloadCSV
- >> = function createJobFactoryFn(reporting) {
+ >> = function createJobFactoryFn(reporting, parentLogger) {
+ const logger = parentLogger.clone([CSV_JOB_TYPE, 'create-job']);

const config = reporting.getConfig();
const crypto = cryptoFactory(config.get('encryptionKey'));

@@ -26,7 +29,7 @@ export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<

return {
headers: serializedEncryptedHeaders,
- spaceId: reporting.getSpaceId(request),
+ spaceId: reporting.getSpaceId(request, logger),
indexPatternSavedObject,
...jobParams,
};
@@ -14,17 +14,16 @@ export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<
TaskPayloadCSV
>> = function executeJobFactoryFn(reporting, parentLogger) {
const config = reporting.getConfig();
- const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']);

return async function runTask(jobId, job, cancellationToken) {
const elasticsearch = reporting.getElasticsearchService();
- const jobLogger = logger.clone([jobId]);
- const generateCsv = createGenerateCsv(jobLogger);
+ const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job', jobId]);
+ const generateCsv = createGenerateCsv(logger);

const encryptionKey = config.get('encryptionKey');
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
- const fakeRequest = reporting.getFakeRequest({ headers }, job.spaceId);
- const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest);
+ const fakeRequest = reporting.getFakeRequest({ headers }, job.spaceId, logger);
+ const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest, logger);

const { callAsCurrentUser } = elasticsearch.legacy.client.asScoped(fakeRequest);
const callEndpoint = (endpoint: string, clientParams = {}, options = {}) =>
@@ -32,11 +32,10 @@ export const runTaskFnFactory: RunTaskFnFactory<ImmediateExecuteFn> = function e
const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']);

return async function runTask(jobId, jobPayload, context, req) {
- const jobLogger = logger.clone(['immediate']);
- const generateCsv = createGenerateCsv(jobLogger);
+ const generateCsv = createGenerateCsv(logger);
const { panel, visType } = jobPayload;

- jobLogger.debug(`Execute job generating [${visType}] csv`);
+ logger.debug(`Execute job generating [${visType}] csv`);

const savedObjectsClient = context.core.savedObjects.client;
const uiSettingsClient = await reporting.getUiSettingsServiceFactory(savedObjectsClient);
@@ -54,11 +53,11 @@ export const runTaskFnFactory: RunTaskFnFactory<ImmediateExecuteFn> = function e
);

if (csvContainsFormulas) {
- jobLogger.warn(`CSV may contain formulas whose values have been escaped`);
+ logger.warn(`CSV may contain formulas whose values have been escaped`);
}

if (maxSizeReached) {
- jobLogger.warn(`Max size reached: CSV output truncated to ${size} bytes`);
+ logger.warn(`Max size reached: CSV output truncated to ${size} bytes`);
}

return {
@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/

+ import { PNG_JOB_TYPE } from '../../../../constants';
import { cryptoFactory } from '../../../lib';
import { CreateJobFn, CreateJobFnFactory } from '../../../types';
import { validateUrls } from '../../common';
@@ -12,7 +13,8 @@ import { JobParamsPNG, TaskPayloadPNG } from '../types';
export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<
JobParamsPNG,
TaskPayloadPNG
- >> = function createJobFactoryFn(reporting) {
+ >> = function createJobFactoryFn(reporting, parentLogger) {
+ const logger = parentLogger.clone([PNG_JOB_TYPE, 'execute-job']);
const config = reporting.getConfig();
const crypto = cryptoFactory(config.get('encryptionKey'));

@@ -27,7 +29,7 @@ export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<

return {
headers: serializedEncryptedHeaders,
- spaceId: reporting.getSpaceId(req),
+ spaceId: reporting.getSpaceId(req, logger),
objectType,
title,
relativeUrl,
@@ -5,6 +5,7 @@
*/

import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
+ import { PDF_JOB_TYPE } from '../../../../constants';
import { cryptoFactory } from '../../../lib';
import { CreateJobFn, CreateJobFnFactory } from '../../../types';
import { validateUrls } from '../../common';
@@ -15,9 +16,10 @@ import { compatibilityShimFactory } from './compatibility_shim';
export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<
JobParamsPDF,
TaskPayloadPDF
- >> = function createJobFactoryFn(reporting, logger) {
+ >> = function createJobFactoryFn(reporting, parentLogger) {
const config = reporting.getConfig();
const crypto = cryptoFactory(config.get('encryptionKey'));
+ const logger = parentLogger.clone([PDF_JOB_TYPE, 'create-job']);
const compatibilityShim = compatibilityShimFactory(logger);

// 7.x and below only
@@ -32,7 +34,7 @@ export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<

return {
headers: serializedEncryptedHeaders,
- spaceId: reporting.getSpaceId(req),
+ spaceId: reporting.getSpaceId(req, logger),
browserTimezone,
forceNow: new Date().toISOString(),
layout,
@@ -26,9 +26,8 @@ export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<
const config = reporting.getConfig();
const encryptionKey = config.get('encryptionKey');

- const logger = parentLogger.clone([PDF_JOB_TYPE, 'execute']);

return async function runTask(jobId, job, cancellationToken) {
+ const logger = parentLogger.clone([PDF_JOB_TYPE, 'execute-job', jobId]);
const apmTrans = apm.startTransaction('reporting execute_job pdf', 'reporting');
const apmGetAssets = apmTrans?.startSpan('get_assets', 'setup');
let apmGeneratePdf: { end: () => void } | null | undefined;
@@ -40,7 +39,9 @@ export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<
mergeMap(() => decryptJobHeaders(encryptionKey, job.headers, logger)),
map((decryptedHeaders) => omitBlockedHeaders(decryptedHeaders)),
map((filteredHeaders) => getConditionalHeaders(config, filteredHeaders)),
- mergeMap((conditionalHeaders) => getCustomLogo(reporting, conditionalHeaders, job.spaceId)),
+ mergeMap((conditionalHeaders) =>
+   getCustomLogo(reporting, conditionalHeaders, job.spaceId, logger)
+ ),
mergeMap(({ logo, conditionalHeaders }) => {
const urls = getFullUrls(config, job);

@@ -8,6 +8,7 @@ import { ReportingConfig, ReportingCore } from '../../../';
import {
createMockConfig,
createMockConfigSchema,
+ createMockLevelLogger,
createMockReportingCore,
} from '../../../test_helpers';
import { getConditionalHeaders } from '../../common';
@@ -16,6 +17,8 @@ import { getCustomLogo } from './get_custom_logo';
let mockConfig: ReportingConfig;
let mockReportingPlugin: ReportingCore;

+ const logger = createMockLevelLogger();

beforeEach(async () => {
mockConfig = createMockConfig(createMockConfigSchema());
mockReportingPlugin = await createMockReportingCore(mockConfig);
@@ -40,7 +43,12 @@ test(`gets logo from uiSettings`, async () => {

const conditionalHeaders = getConditionalHeaders(mockConfig, permittedHeaders);

- const { logo } = await getCustomLogo(mockReportingPlugin, conditionalHeaders);
+ const { logo } = await getCustomLogo(
+   mockReportingPlugin,
+   conditionalHeaders,
+   'spaceyMcSpaceIdFace',
+   logger
+ );

expect(mockGet).toBeCalledWith('xpackReporting:customPdfLogo');
expect(logo).toBe('purple pony');
@@ -6,16 +6,21 @@

import { ReportingCore } from '../../../';
import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../../common/constants';
+ import { LevelLogger } from '../../../lib';
import { ConditionalHeaders } from '../../common';

export const getCustomLogo = async (
reporting: ReportingCore,
conditionalHeaders: ConditionalHeaders,
- spaceId?: string
+ spaceId: string | undefined,
+ logger: LevelLogger
) => {
- const fakeRequest = reporting.getFakeRequest({ headers: conditionalHeaders.headers }, spaceId);
- const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest);
+ const fakeRequest = reporting.getFakeRequest(
+   { headers: conditionalHeaders.headers },
+   spaceId,
+   logger
+ );
+ const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest, logger);
const logo: string = await uiSettingsClient.get(UI_SETTINGS_CUSTOM_PDF_LOGO);

// continue the pipeline
