diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.test.tsx
index 76622f9c12822..7511f4ae2c2c3 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.test.tsx
@@ -12,8 +12,9 @@ import React from 'react';
 
 import { shallow, ShallowWrapper } from 'enzyme';
 
-import { EuiBasicTable, EuiButtonIcon, EuiInMemoryTable } from '@elastic/eui';
+import { EuiBasicTable, EuiButtonIcon } from '@elastic/eui';
 
+import { DEFAULT_META } from '../../../../shared/constants';
 import { mountWithIntl } from '../../../../test_helpers';
 
 import { CrawlerDomain } from '../types';
@@ -51,15 +52,19 @@ const domains: CrawlerDomain[] = [
 const values = {
   // EngineLogic
   engineName: 'some-engine',
-  // CrawlerOverviewLogic
+  // CrawlerDomainsLogic
   domains,
+  meta: DEFAULT_META,
+  dataLoading: false,
   // AppLogic
   myRole: { canManageEngineCrawler: false },
 };
 
 const actions = {
-  // CrawlerOverviewLogic
+  // CrawlerDomainsLogic
   deleteDomain: jest.fn(),
+  fetchCrawlerDomainsData: jest.fn(),
+  onPaginate: jest.fn(),
 };
 
 describe('DomainsTable', () => {
@@ -69,17 +74,28 @@ describe('DomainsTable', () => {
   beforeEach(() => {
     jest.clearAllMocks();
   });
+
   beforeAll(() => {
     setMockValues(values);
     setMockActions(actions);
     wrapper = shallow(<DomainsTable />);
     tableContent = mountWithIntl(<DomainsTable />)
-      .find(EuiInMemoryTable)
+      .find(EuiBasicTable)
       .text();
   });
 
   it('renders', () => {
-    expect(wrapper.find(EuiInMemoryTable)).toHaveLength(1);
+    expect(wrapper.find(EuiBasicTable)).toHaveLength(1);
+
+    expect(wrapper.find(EuiBasicTable).prop('pagination')).toEqual({
+      hidePerPageOptions: true,
+      pageIndex: 0,
+      pageSize: 10,
+      totalItemCount: 0,
+    });
+
+    wrapper.find(EuiBasicTable).simulate('change', { page: { index: 2 } });
+    expect(actions.onPaginate).toHaveBeenCalledWith(3);
   });
 
   describe('columns', () => {
@@ -88,7 +104,7 @@
     });
 
     it('renders a clickable domain url', () => {
-      const basicTable = wrapper.find(EuiInMemoryTable).dive().find(EuiBasicTable).dive();
+      const basicTable = wrapper.find(EuiBasicTable).dive();
       const link = basicTable.find('[data-test-subj="CrawlerDomainURL"]').at(0);
 
       expect(link.dive().text()).toContain('elastic.co');
@@ -110,7 +126,7 @@
   });
 
   describe('actions column', () => {
-    const getTable = () => wrapper.find(EuiInMemoryTable).dive().find(EuiBasicTable).dive();
+    const getTable = () => wrapper.find(EuiBasicTable).dive();
     const getActions = () => getTable().find('ExpandedItemActions');
     const getActionItems = () => getActions().first().dive().find('DefaultItemAction');
 
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.tsx
index 1f0f6be22102f..b8d8159be7b16 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/domains_table.tsx
@@ -5,11 +5,11 @@
  * 2.0.
  */
 
-import React from 'react';
+import React, { useEffect } from 'react';
 
 import { useActions, useValues } from 'kea';
 
-import { EuiInMemoryTable, EuiBasicTableColumn } from '@elastic/eui';
+import { EuiBasicTableColumn, EuiBasicTable } from '@elastic/eui';
 
 import { i18n } from '@kbn/i18n';
 
@@ -18,11 +18,11 @@ import { FormattedNumber } from '@kbn/i18n/react';
 import { DELETE_BUTTON_LABEL, MANAGE_BUTTON_LABEL } from '../../../../shared/constants';
 import { KibanaLogic } from '../../../../shared/kibana';
 import { EuiLinkTo } from '../../../../shared/react_router_helpers';
+import { convertMetaToPagination, handlePageChange } from '../../../../shared/table_pagination';
 import { AppLogic } from '../../../app_logic';
 import { ENGINE_CRAWLER_DOMAIN_PATH } from '../../../routes';
 import { generateEnginePath } from '../../engine';
 
-import { CrawlerLogic } from '../crawler_logic';
-import { CrawlerOverviewLogic } from '../crawler_overview_logic';
+import { CrawlerDomainsLogic } from '../crawler_domains_logic';
 import { CrawlerDomain } from '../types';
 import { getDeleteDomainConfirmationMessage } from '../utils';
@@ -30,9 +30,12 @@ import { getDeleteDomainConfirmationMessage } from '../utils';
 import { CustomFormattedTimestamp } from './custom_formatted_timestamp';
 
 export const DomainsTable: React.FC = () => {
-  const { domains } = useValues(CrawlerLogic);
+  const { domains, meta, dataLoading } = useValues(CrawlerDomainsLogic);
+  const { fetchCrawlerDomainsData, onPaginate, deleteDomain } = useActions(CrawlerDomainsLogic);
 
-  const { deleteDomain } = useActions(CrawlerOverviewLogic);
+  useEffect(() => {
+    fetchCrawlerDomainsData();
+  }, [meta.page.current]);
 
   const {
     myRole: { canManageEngineCrawler },
   } = useValues(AppLogic);
@@ -125,5 +128,16 @@ export const DomainsTable: React.FC = () => {
     columns.push(actionsColumn);
   }
 
-  return <EuiInMemoryTable items={domains} columns={columns} />;
+  return (
+    <EuiBasicTable
+      loading={dataLoading}
+      items={domains}
+      columns={columns}
+      pagination={{
+        ...convertMetaToPagination(meta),
+        hidePerPageOptions: true,
+      }}
+      onChange={handlePageChange(onPaginate)}
+    />
+  );
 };
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_domains_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_domains_logic.test.ts
new file mode 100644
index 0000000000000..6cf2f21fc6d2e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_domains_logic.test.ts
@@ -0,0 +1,206 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+  LogicMounter,
+  mockHttpValues,
+  mockFlashMessageHelpers,
+} from '../../../__mocks__/kea_logic';
+import '../../__mocks__/engine_logic.mock';
+
+import { nextTick } from '@kbn/test/jest';
+
+import { Meta } from '../../../../../common/types';
+
+import { DEFAULT_META } from '../../../shared/constants';
+
+import { CrawlerDomainsLogic, CrawlerDomainsValues } from './crawler_domains_logic';
+import { CrawlerDataFromServer, CrawlerDomain, CrawlerDomainFromServer } from './types';
+import { crawlerDataServerToClient } from './utils';
+
+const DEFAULT_VALUES: CrawlerDomainsValues = {
+  dataLoading: true,
+  domains: [],
+  meta: DEFAULT_META,
+};
+
+const crawlerDataResponse: CrawlerDataFromServer = {
+  domains: [
+    {
+      id: '507f1f77bcf86cd799439011',
+      name: 'elastic.co',
+      created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
+      document_count: 13,
+      sitemaps: [],
+      entry_points: [],
+      crawl_rules: [],
+      deduplication_enabled: false,
+      deduplication_fields: ['title'],
+      available_deduplication_fields: ['title', 'description'],
+    },
+  ],
+  events: [],
+  most_recent_crawl_request: null,
+};
+
+const clientCrawlerData = crawlerDataServerToClient(crawlerDataResponse);
+
+const domainsFromServer: CrawlerDomainFromServer[] = [
+  {
+    name: 'http://www.example.com',
+    created_on: 'foo',
+    document_count: 10,
+    id: '1',
+    crawl_rules: [],
+    entry_points: [],
+    sitemaps: [],
+    deduplication_enabled: true,
+    deduplication_fields: [],
+    available_deduplication_fields: [],
+  },
+];
+
+const domains: CrawlerDomain[] = [
+  {
+    createdOn: 'foo',
+    documentCount: 10,
+    id: '1',
+    url: 'http://www.example.com',
+    crawlRules: [],
+    entryPoints: [],
+    sitemaps: [],
+    deduplicationEnabled: true,
+    deduplicationFields: [],
+    availableDeduplicationFields: [],
+  },
+];
+
+const meta: Meta = {
+  page: {
+    current: 2,
+    size: 100,
+    total_pages: 5,
+    total_results: 500,
+  },
+};
+
+describe('CrawlerDomainsLogic', () => {
+  const { mount } = new LogicMounter(CrawlerDomainsLogic);
+  const { http } = mockHttpValues;
+  const { flashAPIErrors } = mockFlashMessageHelpers;
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('has expected default values', () => {
+    mount();
+    expect(CrawlerDomainsLogic.values).toEqual(DEFAULT_VALUES);
+  });
+
+  describe('actions', () => {
+    describe('onReceiveData', () => {
+      it('sets state from an API call', () => {
+        mount();
+
+        CrawlerDomainsLogic.actions.onReceiveData(domains, meta);
+
+        expect(CrawlerDomainsLogic.values).toEqual({
+          ...DEFAULT_VALUES,
+          domains,
+          meta,
+          dataLoading: false,
+        });
+      });
+    });
+
+    describe('onPaginate', () => {
+      it('sets dataLoading to true & sets meta state', () => {
+        mount({ dataLoading: false });
+        CrawlerDomainsLogic.actions.onPaginate(5);
+
+        expect(CrawlerDomainsLogic.values).toEqual({
+          ...DEFAULT_VALUES,
+          dataLoading: true,
+          meta: {
+            ...DEFAULT_META,
+            page: {
+              ...DEFAULT_META.page,
+              current: 5,
+            },
+          },
+        });
+      });
+    });
+  });
+
+  describe('listeners', () => {
+    describe('fetchCrawlerDomainsData', () => {
+      it('updates logic with data that has been converted from server to client', async () => {
+        mount();
+        jest.spyOn(CrawlerDomainsLogic.actions, 'onReceiveData');
+
+        http.get.mockReturnValueOnce(
+          Promise.resolve({
+            results: domainsFromServer,
+            meta,
+          })
+        );
+
+        CrawlerDomainsLogic.actions.fetchCrawlerDomainsData();
+        await nextTick();
+
+        expect(http.get).toHaveBeenCalledWith(
+          '/internal/app_search/engines/some-engine/crawler/domains',
+          {
+            query: { 'page[current]': 1, 'page[size]': 10 },
+          }
+        );
+
+        expect(CrawlerDomainsLogic.actions.onReceiveData).toHaveBeenCalledWith(domains, meta);
+      });
+
+      it('displays any errors to the user', async () => {
+        mount();
+        http.get.mockReturnValueOnce(Promise.reject('error'));
+
+        CrawlerDomainsLogic.actions.fetchCrawlerDomainsData();
+        await nextTick();
+
+        expect(flashAPIErrors).toHaveBeenCalledWith('error');
+      });
+    });
+
+    describe('deleteDomain', () => {
+      it('deletes the domain and then calls crawlerDomainDeleted with the response', async () => {
+        jest.spyOn(CrawlerDomainsLogic.actions, 'crawlerDomainDeleted');
+        http.delete.mockReturnValue(Promise.resolve(crawlerDataResponse));
+
+        CrawlerDomainsLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
+        await nextTick();
+
+        expect(http.delete).toHaveBeenCalledWith(
+          '/internal/app_search/engines/some-engine/crawler/domains/1234',
+          {
+            query: { respond_with: 'crawler_details' },
+          }
+        );
+        expect(CrawlerDomainsLogic.actions.crawlerDomainDeleted).toHaveBeenCalledWith(
+          clientCrawlerData
+        );
+      });
+
+      it('calls flashApiErrors when there is an error', async () => {
+        http.delete.mockReturnValue(Promise.reject('error'));
+
+        CrawlerDomainsLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
+        await nextTick();
+
+        expect(flashAPIErrors).toHaveBeenCalledWith('error');
+      });
+    });
+  });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_domains_logic.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_domains_logic.ts
new file mode 100644
index 0000000000000..e26e9528ee1d2
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_domains_logic.ts
@@ -0,0 +1,125 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { Meta } from '../../../../../common/types';
+import { DEFAULT_META } from '../../../shared/constants';
+import { flashAPIErrors } from '../../../shared/flash_messages';
+import { HttpLogic } from '../../../shared/http';
+import { updateMetaPageIndex } from '../../../shared/table_pagination';
+import { EngineLogic } from '../engine';
+
+import {
+  CrawlerData,
+  CrawlerDataFromServer,
+  CrawlerDomain,
+  CrawlerDomainFromServer,
+} from './types';
+import { crawlerDataServerToClient, crawlerDomainServerToClient } from './utils';
+
+export interface CrawlerDomainsValues {
+  dataLoading: boolean;
+  domains: CrawlerDomain[];
+  meta: Meta;
+}
+
+interface CrawlerDomainsResponse {
+  results: CrawlerDomainFromServer[];
+  meta: Meta;
+}
+
+interface CrawlerDomainsActions {
+  deleteDomain(domain: CrawlerDomain): { domain: CrawlerDomain };
+  fetchCrawlerDomainsData(): void;
+  onPaginate(newPageIndex: number): { newPageIndex: number };
+  onReceiveData(domains: CrawlerDomain[], meta: Meta): { domains: CrawlerDomain[]; meta: Meta };
+  crawlerDomainDeleted(data: CrawlerData): { data: CrawlerData };
+}
+
+export const CrawlerDomainsLogic = kea<MakeLogicType<CrawlerDomainsValues, CrawlerDomainsActions>>({
+  path: ['enterprise_search', 'app_search', 'crawler', 'crawler_domains_logic'],
+  actions: {
+    deleteDomain: (domain) => ({ domain }),
+    fetchCrawlerDomainsData: true,
+    onReceiveData: (domains, meta) => ({ domains, meta }),
+    onPaginate: (newPageIndex) => ({ newPageIndex }),
+    crawlerDomainDeleted: (data) => ({ data }),
+  },
+  reducers: {
+    dataLoading: [
+      true,
+      {
+        onReceiveData: () => false,
+        onPaginate: () => true,
+      },
+    ],
+    domains: [
+      [],
+      {
+        onReceiveData: (_, { domains }) => domains,
+      },
+    ],
+    meta: [
+      DEFAULT_META,
+      {
+        onReceiveData: (_, { meta }) => meta,
+        onPaginate: (state, { newPageIndex }) => updateMetaPageIndex(state, newPageIndex),
+      },
+    ],
+  },
+  listeners: ({ actions, values }) => ({
+    fetchCrawlerDomainsData: async () => {
+      const { http } = HttpLogic.values;
+      const { engineName } = EngineLogic.values;
+      const { meta } = values;
+
+      const query = {
+        'page[current]': meta.page.current,
+        'page[size]': meta.page.size,
+      };
+
+      try {
+        const response = await http.get<CrawlerDomainsResponse>(
+          `/internal/app_search/engines/${engineName}/crawler/domains`,
+          {
+            query,
+          }
+        );
+
+        const domains = response.results.map(crawlerDomainServerToClient);
+
+        actions.onReceiveData(domains, response.meta);
+      } catch (e) {
+        flashAPIErrors(e);
+      }
+    },
+
+    deleteDomain: async ({ domain }) => {
+      const { http } = HttpLogic.values;
+      const { engineName } = EngineLogic.values;
+
+      try {
+        const response = await http.delete<CrawlerDataFromServer>(
+          `/internal/app_search/engines/${engineName}/crawler/domains/${domain.id}`,
+          {
+            query: {
+              respond_with: 'crawler_details',
+            },
+          }
+        );
+
+        const crawlerData = crawlerDataServerToClient(response);
+        // Publish for other logic files to listen for
+        actions.crawlerDomainDeleted(crawlerData);
+        actions.fetchCrawlerDomainsData();
+      } catch (e) {
+        flashAPIErrors(e);
+      }
+    },
+  }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts
index 53c980c9750f5..7ba1adb51bbfb 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts
@@ -14,6 +14,7 @@
 import '../../__mocks__/engine_logic.mock';
 
 import { nextTick } from '@kbn/test/jest';
 
+import { CrawlerDomainsLogic } from './crawler_domains_logic';
 import { CrawlerLogic, CrawlerValues } from './crawler_logic';
 import {
   CrawlerData,
@@ -159,6 +160,16 @@ describe('CrawlerLogic', () => {
   });
 
   describe('listeners', () => {
+    describe('CrawlerDomainsLogic.actionTypes.crawlerDomainDeleted', () => {
+      it('updates data in state when a domain is deleted', () => {
+        jest.spyOn(CrawlerLogic.actions, 'onReceiveCrawlerData');
+        CrawlerDomainsLogic.actions.crawlerDomainDeleted(MOCK_CLIENT_CRAWLER_DATA);
+        expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
+          MOCK_CLIENT_CRAWLER_DATA
+        );
+      });
+    });
+
     describe('fetchCrawlerData', () => {
       it('updates logic with data that has been converted from server to client', async () => {
         jest.spyOn(CrawlerLogic.actions, 'onReceiveCrawlerData');
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts
index d1530c79a6821..08a01af67ece6 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts
@@ -12,6 +12,8 @@ import { flashAPIErrors } from '../../../shared/flash_messages';
 import { HttpLogic } from '../../../shared/http';
 import { EngineLogic } from '../engine';
 
+import { CrawlerDomainsLogic } from './crawler_domains_logic';
+
 import {
   CrawlerData,
   CrawlerDomain,
@@ -166,6 +168,9 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
 
       actions.onCreateNewTimeout(timeoutIdId);
     },
+    [CrawlerDomainsLogic.actionTypes.crawlerDomainDeleted]: ({ data }) => {
+      actions.onReceiveCrawlerData(data);
+    },
   }),
   events: ({ values }) => ({
     beforeUnmount: () => {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts
deleted file mode 100644
index a701c43d4775c..0000000000000
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import {
-  LogicMounter,
-  mockHttpValues,
-  mockFlashMessageHelpers,
-} from '../../../__mocks__/kea_logic';
-import '../../__mocks__/engine_logic.mock';
-
-jest.mock('./crawler_logic', () => ({
-  CrawlerLogic: {
-    actions: {
-      onReceiveCrawlerData: jest.fn(),
-    },
-  },
-}));
-
-import { nextTick } from '@kbn/test/jest';
-
-import { CrawlerLogic } from './crawler_logic';
-import { CrawlerOverviewLogic } from './crawler_overview_logic';
-
-import { CrawlerDataFromServer, CrawlerDomain } from './types';
-import { crawlerDataServerToClient } from './utils';
-
-const MOCK_SERVER_CRAWLER_DATA: CrawlerDataFromServer = {
-  domains: [
-    {
-      id: '507f1f77bcf86cd799439011',
-      name: 'elastic.co',
-      created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
-      document_count: 13,
-      sitemaps: [],
-      entry_points: [],
-      crawl_rules: [],
-      deduplication_enabled: false,
-      deduplication_fields: ['title'],
-      available_deduplication_fields: ['title', 'description'],
-    },
-  ],
-  events: [],
-  most_recent_crawl_request: null,
-};
-
-const MOCK_CLIENT_CRAWLER_DATA = crawlerDataServerToClient(MOCK_SERVER_CRAWLER_DATA);
-
-describe('CrawlerOverviewLogic', () => {
-  const { mount } = new LogicMounter(CrawlerOverviewLogic);
-  const { http } = mockHttpValues;
-  const { flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;
-
-  beforeEach(() => {
-    jest.clearAllMocks();
-    mount();
-  });
-
-  describe('listeners', () => {
-    describe('deleteDomain', () => {
-      it('calls onReceiveCrawlerData with retrieved data that has been converted from server to client', async () => {
-        jest.spyOn(CrawlerLogic.actions, 'onReceiveCrawlerData');
-        http.delete.mockReturnValue(Promise.resolve(MOCK_SERVER_CRAWLER_DATA));
-
-        CrawlerOverviewLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
-        await nextTick();
-
-        expect(http.delete).toHaveBeenCalledWith(
-          '/internal/app_search/engines/some-engine/crawler/domains/1234',
-          {
-            query: { respond_with: 'crawler_details' },
-          }
-        );
-        expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
-          MOCK_CLIENT_CRAWLER_DATA
-        );
-        expect(flashSuccessToast).toHaveBeenCalled();
-      });
-
-      it('calls flashApiErrors when there is an error', async () => {
-        http.delete.mockReturnValue(Promise.reject('error'));
-
-        CrawlerOverviewLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
-        await nextTick();
-
-        expect(flashAPIErrors).toHaveBeenCalledWith('error');
-      });
-    });
-  });
-});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.ts
deleted file mode 100644
index 605d45effaa24..0000000000000
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { kea, MakeLogicType } from 'kea';
-
-import { flashAPIErrors, flashSuccessToast } from '../../../shared/flash_messages';
-
-import { HttpLogic } from '../../../shared/http';
-import { EngineLogic } from '../engine';
-
-import { CrawlerLogic } from './crawler_logic';
-import { CrawlerDataFromServer, CrawlerDomain } from './types';
-import { crawlerDataServerToClient, getDeleteDomainSuccessMessage } from './utils';
-
-interface CrawlerOverviewActions {
-  deleteDomain(domain: CrawlerDomain): { domain: CrawlerDomain };
-}
-
-export const CrawlerOverviewLogic = kea<MakeLogicType<{}, CrawlerOverviewActions>>({
-  path: ['enterprise_search', 'app_search', 'crawler', 'crawler_overview'],
-  actions: {
-    deleteDomain: (domain) => ({ domain }),
-  },
-  listeners: () => ({
-    deleteDomain: async ({ domain }) => {
-      const { http } = HttpLogic.values;
-      const { engineName } = EngineLogic.values;
-
-      try {
-        const response = await http.delete<CrawlerDataFromServer>(
-          `/internal/app_search/engines/${engineName}/crawler/domains/${domain.id}`,
-          {
-            query: {
-              respond_with: 'crawler_details',
-            },
-          }
-        );
-        const crawlerData = crawlerDataServerToClient(response);
-        CrawlerLogic.actions.onReceiveCrawlerData(crawlerData);
-        flashSuccessToast(getDeleteDomainSuccessMessage(domain.url));
-      } catch (e) {
-        flashAPIErrors(e);
-      }
-    },
-  }),
-});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
index beb1e65af47a4..ed445b923ea2a 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
@@ -28,9 +28,6 @@ const MOCK_VALUES = {
   domain: {
     url: 'https://elastic.co',
   },
-  // CrawlerOverviewLogic
-  domains: [],
-  crawlRequests: [],
 };
 
 const MOCK_ACTIONS = {
diff --git a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts
index 5dff1b934ae5a..01c2ff42fc010 100644
--- a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts
@@ -109,6 +109,45 @@ describe('crawler routes', () => {
     });
   });
 
+  describe('GET /internal/app_search/engines/{name}/crawler/domains', () => {
+    let mockRouter: MockRouter;
+
+    beforeEach(() => {
+      jest.clearAllMocks();
+      mockRouter = new MockRouter({
+        method: 'get',
+        path: '/internal/app_search/engines/{name}/crawler/domains',
+      });
+
+      registerCrawlerRoutes({
+        ...mockDependencies,
+        router: mockRouter.router,
+      });
+    });
+
+    it('creates a request to enterprise search', () => {
+      expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+        path: '/api/as/v0/engines/:name/crawler/domains',
+      });
+    });
+
+    it('validates correctly', () => {
+      const request = {
+        params: { name: 'some-engine' },
+        query: {
+          'page[current]': 5,
+          'page[size]': 10,
+        },
+      };
+      mockRouter.shouldValidate(request);
+    });
+
+    it('fails validation without required params', () => {
+      const request = { params: {} };
+      mockRouter.shouldThrow(request);
+    });
+  });
+
   describe('POST /internal/app_search/engines/{name}/crawler/crawl_requests/cancel', () => {
     let mockRouter: MockRouter;
 
diff --git a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts
index 72a48a013636c..9336d9ac93e70 100644
--- a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts
@@ -69,6 +69,24 @@ export function registerCrawlerRoutes({
     })
   );
 
+  router.get(
+    {
+      path: '/internal/app_search/engines/{name}/crawler/domains',
+      validate: {
+        params: schema.object({
+          name: schema.string(),
+        }),
+        query: schema.object({
+          'page[current]': schema.number(),
+          'page[size]': schema.number(),
+        }),
+      },
+    },
+    enterpriseSearchRequestHandler.createRequest({
+      path: '/api/as/v0/engines/:name/crawler/domains',
+    })
+  );
+
   router.post(
     {
       path: '/internal/app_search/engines/{name}/crawler/domains',