Add pagination support in xata pull (#1477)
SferaDev authored May 23, 2024
1 parent e109eee commit af38c4b
Showing 10 changed files with 5,068 additions and 2,989 deletions.
22 changes: 12 additions & 10 deletions cli/src/commands/pull/index.ts
@@ -51,20 +51,22 @@ export default class Pull extends BaseCommand<typeof Pull> {

const details = await getBranchDetailsWithPgRoll(xata, { workspace, region, database, branch });

let logs: Schemas.MigrationHistoryItem[] | Schemas.Commit[] = [];
let logs: (Schemas.MigrationHistoryItem | Schemas.Commit)[] = [];
let cursor = undefined;
if (isBranchPgRollEnabled(details)) {
const { migrations } = await xata.api.migrations.getMigrationHistory({
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` }
});
logs = migrations;
do {
const { migrations, cursor: newCursor } = await xata.api.migrations.getMigrationHistory({
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
queryParams: { cursor, limit: 200 }
});

logs = logs.concat(migrations);
cursor = newCursor;
} while (cursor !== undefined);
} else {
const data = await xata.api.migrations.getBranchSchemaHistory({
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
body: {
// TODO: Fix pagination in the API to start from last known migration and not from the beginning
// Also paginate until we get all migrations
page: { size: 200 }
}
body: { page: { size: 200 } }
});
logs = data.logs;
}
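
The do/while loop above (mirrored in `cli/src/commands/push/index.ts` below) keeps requesting pages of 200 migrations until the API stops returning a cursor. A minimal sketch of the same pattern factored into a reusable helper, assuming only the client call and the `{ migrations, cursor }` response shape visible in this diff:

```ts
// Sketch only: collects the full pgroll migration history page by page.
// `xata` is the CLI's API client (typed loosely here); `Schemas` is the same
// import the command file already uses.
async function fetchAllMigrations(
  xata: any,
  params: { workspace: string; region: string; database: string; branch: string }
): Promise<Schemas.MigrationHistoryItem[]> {
  const { workspace, region, database, branch } = params;
  let logs: Schemas.MigrationHistoryItem[] = [];
  let cursor: string | undefined = undefined;
  do {
    const { migrations, cursor: newCursor } = await xata.api.migrations.getMigrationHistory({
      pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
      queryParams: { cursor, limit: 200 }
    });
    logs = logs.concat(migrations);
    cursor = newCursor; // undefined once the last page is reached, which ends the loop
  } while (cursor !== undefined);
  return logs;
}
```
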
4 changes: 2 additions & 2 deletions cli/src/commands/pull/pull.test.ts
@@ -230,7 +230,7 @@ const pgrollFetchSingle = (url: string, request: any) => {
schema: { tables: [{ name: 'table1', columns: [{ name: 'a', type: 'string' }] }] }
})
};
} else if (url === `${baseUrl}/migrations/history` && request.method === 'GET') {
} else if (url === `${baseUrl}/migrations/history?limit=200` && request.method === 'GET') {
return {
ok: true,
json: async () => ({
@@ -251,7 +251,7 @@ const pgrollFetchMultiple = (url: string, request: any) => {
schema: { tables: [{ name: 'table1', columns: [{ name: 'a', type: 'string' }] }] }
})
};
} else if (url === `${baseUrl}/migrations/history` && request.method === 'GET') {
} else if (url === `${baseUrl}/migrations/history?limit=200` && request.method === 'GET') {
return {
ok: true,
json: async () => ({
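
The mocks now match the `?limit=200` query string carried by the first paginated request. A hypothetical multi-page mock in the same style, assuming the generated fetcher appends the cursor as a query parameter on follow-up requests and that `baseUrl` is an absolute URL; the fixtures are placeholders, not objects from this test file:

```ts
// Placeholder fixtures; the real tests use full MigrationHistoryItem objects.
const pageOne: any[] = [{ name: 'mig_example_1' }];
const pageTwo: any[] = [{ name: 'mig_example_2' }];

// Hypothetical mock serving two pages; the follow-up request is recognised by its cursor param.
const pgrollFetchPaginated = (url: string, request: any) => {
  const parsed = new URL(url);
  if (parsed.pathname.endsWith('/migrations/history') && request.method === 'GET') {
    const cursor = parsed.searchParams.get('cursor');
    return {
      ok: true,
      json: async () =>
        cursor === null
          ? { migrations: pageOne, cursor: '2024-05-23T00:00:00Z' } // first page: a cursor signals more data
          : { migrations: pageTwo } // no cursor in the response, so the CLI stops looping
    };
  }
  return { ok: false, json: async () => ({}) };
};
```
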
22 changes: 12 additions & 10 deletions cli/src/commands/push/index.ts
@@ -48,20 +48,22 @@ export default class Push extends BaseCommand<typeof Push> {

const details = await getBranchDetailsWithPgRoll(xata, { workspace, region, database, branch });

let logs: Schemas.MigrationHistoryItem[] | Schemas.Commit[] = [];
let logs: (Schemas.MigrationHistoryItem | Schemas.Commit)[] = [];
let cursor = undefined;
if (isBranchPgRollEnabled(details)) {
const { migrations } = await xata.api.migrations.getMigrationHistory({
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` }
});
logs = migrations;
do {
const { migrations, cursor: newCursor } = await xata.api.migrations.getMigrationHistory({
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
queryParams: { cursor, limit: 200 }
});

logs = logs.concat(migrations);
cursor = newCursor;
} while (cursor !== undefined);
} else {
const data = await xata.api.migrations.getBranchSchemaHistory({
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
body: {
// TODO: Fix pagination in the API to start from last known migration and not from the beginning
// Also paginate until we get all migrations
page: { size: 200 }
}
body: { page: { size: 200 } }
});
logs = data.logs;
}
4 changes: 2 additions & 2 deletions cli/src/commands/push/push.test.ts
@@ -244,7 +244,7 @@ const pgrollFetchSingle = (url: string, request: any, type: 'inferred' | 'pgroll
schema: { tables: [{ name: 'table1', columns: [{ name: 'a', type: 'string' }] }] }
})
};
} else if (url === `${baseUrl}/migrations/history` && request.method === 'GET') {
} else if (url === `${baseUrl}/migrations/history?limit=200` && request.method === 'GET') {
return {
ok: true,
json: async () => (type === 'inferred' ? { migrations: [pgrollMigration3] } : { migrations: [pgrollMigration1] })
@@ -263,7 +263,7 @@ const pgrollFetchEmpty = (url: string, request: any) => {
schema: { tables: [{ name: 'table1', columns: [{ name: 'a', type: 'string' }] }] }
})
};
} else if (url === `${baseUrl}/migrations/history` && request.method === 'GET') {
} else if (url === `${baseUrl}/migrations/history?limit=200` && request.method === 'GET') {
return {
ok: true,
json: async () => ({
2 changes: 1 addition & 1 deletion cli/src/migrations/files.ts
@@ -97,7 +97,7 @@ export async function removeLocalMigrations() {
}
}

export function commitToMigrationFile(logs: Schemas.Commit[] | Schemas.MigrationHistoryItem[]): LocalMigrationFile[] {
export function commitToMigrationFile(logs: (Schemas.Commit | Schemas.MigrationHistoryItem)[]): LocalMigrationFile[] {
// Schema history comes in reverse order, so we need to reverse it
return logs.reverse().map((log) =>
isMigrationPgRollFormat(log)
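
The parameter change from `Schemas.Commit[] | Schemas.MigrationHistoryItem[]` to `(Schemas.Commit | Schemas.MigrationHistoryItem)[]` matches the callers above: an array accumulated across pages can hold either kind of log, which only the parenthesized form permits. A small illustration of the difference:

```ts
// (A | B)[]: each element may be either type.
// A[] | B[]: the whole array is homogeneous, all A or all B.
declare const pgrollLog: Schemas.MigrationHistoryItem;
declare const legacyLog: Schemas.Commit;

const mixed: (Schemas.MigrationHistoryItem | Schemas.Commit)[] = [pgrollLog, legacyLog]; // ok

// The old parameter type would reject a mixed array:
// const homogeneous: Schemas.MigrationHistoryItem[] | Schemas.Commit[] = [pgrollLog, legacyLog]; // type error

commitToMigrationFile(mixed); // accepted with the new signature
```
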
7 changes: 7 additions & 0 deletions openapi-codegen.config.ts
@@ -43,6 +43,13 @@ export default defineConfig({
to: '#/components/schemas/XataRecord'
});

// Avoid conflict with duplicated `PageSize` type
context.openAPIDocument = renameComponent({
openAPIDocument: context.openAPIDocument,
from: '#/components/schemas/PageSize',
to: '#/components/schemas/PaginationPageSize'
});

context.openAPIDocument = removeDeprecatedObjectType({ openAPIDocument: context.openAPIDocument });

// Inject path param in all requests (for now, this should be server url variables)
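
`renameComponent` keeps the generator from emitting two clashing `PageSize` types once the new pagination parameters are in the spec. Its implementation lives in the repo's codegen tooling and is not shown in this diff; conceptually it renames the schema entry and rewrites every `$ref` that points at it. A rough, hypothetical sketch of that idea (not the repo's actual helper):

```ts
// Illustration only: rename components.schemas[from] to [to] and rewrite all matching $ref strings.
function renameSchemaComponentSketch(openAPIDocument: any, from: string, to: string): any {
  const fromName = from.split('/').pop()!;
  const toName = to.split('/').pop()!;
  // Rewrite `$ref` pointers by replacing the exact JSON-encoded pointer text.
  const doc = JSON.parse(JSON.stringify(openAPIDocument).split(`"${from}"`).join(`"${to}"`));
  if (doc.components?.schemas?.[fromName] !== undefined) {
    doc.components.schemas[toName] = doc.components.schemas[fromName];
    delete doc.components.schemas[fromName];
  }
  return doc;
}
```
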
108 changes: 107 additions & 1 deletion packages/client/src/api/dataPlaneComponents.ts
@@ -124,6 +124,98 @@ export const startMigration = (variables: StartMigrationVariables, signal?: Abor
signal
});

export type CompleteMigrationPathParams = {
/**
* The DBBranchName matches the pattern `{db_name}:{branch_name}`.
*/
dbBranchName: Schemas.DBBranchName;
workspace: string;
region: string;
};

export type CompleteMigrationError = Fetcher.ErrorWrapper<
| {
status: 400;
payload: Responses.BadRequestError;
}
| {
status: 401;
payload: Responses.AuthError;
}
| {
status: 404;
payload: Responses.SimpleError;
}
>;

export type CompleteMigrationVariables = {
pathParams: CompleteMigrationPathParams;
} & DataPlaneFetcherExtraProps;

/**
* Complete an active migration on the specified database
*/
export const completeMigration = (variables: CompleteMigrationVariables, signal?: AbortSignal) =>
dataPlaneFetch<
Schemas.CompleteMigrationResponse,
CompleteMigrationError,
undefined,
{},
{},
CompleteMigrationPathParams
>({
url: '/db/{dbBranchName}/migrations/complete',
method: 'post',
...variables,
signal
});

export type RollbackMigrationPathParams = {
/**
* The DBBranchName matches the pattern `{db_name}:{branch_name}`.
*/
dbBranchName: Schemas.DBBranchName;
workspace: string;
region: string;
};

export type RollbackMigrationError = Fetcher.ErrorWrapper<
| {
status: 400;
payload: Responses.BadRequestError;
}
| {
status: 401;
payload: Responses.AuthError;
}
| {
status: 404;
payload: Responses.SimpleError;
}
>;

export type RollbackMigrationVariables = {
pathParams: RollbackMigrationPathParams;
} & DataPlaneFetcherExtraProps;

/**
* Roll back an active migration on the specified database
*/
export const rollbackMigration = (variables: RollbackMigrationVariables, signal?: AbortSignal) =>
dataPlaneFetch<
Schemas.RollbackMigrationResponse,
RollbackMigrationError,
undefined,
{},
{},
RollbackMigrationPathParams
>({
url: '/db/{dbBranchName}/migrations/rollback',
method: 'post',
...variables,
signal
});
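
`completeMigration` and `rollbackMigration` are registered under `operationsByTag.migrations` further down, so they surface on the generated client the same way the CLI calls `getMigrationHistory` above. A hedged usage sketch, reusing the same workspace/region/database/branch variables as the CLI commands:

```ts
// Sketch only: finalize the active pgroll migration on a branch,
await xata.api.migrations.completeMigration({
  pathParams: { workspace, region, dbBranchName: `${database}:${branch}` }
});

// or abandon it instead.
await xata.api.migrations.rollbackMigration({
  pathParams: { workspace, region, dbBranchName: `${database}:${branch}` }
});
```
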

export type AdaptTablePathParams = {
/**
* The DBBranchName matches the pattern `{db_name}:{branch_name}`.
@@ -305,6 +397,17 @@ export type GetMigrationHistoryPathParams = {
region: string;
};

export type GetMigrationHistoryQueryParams = {
/**
* @format date-time
*/
cursor?: string;
/**
* Page size
*/
limit?: Schemas.PaginationPageSize;
};

export type GetMigrationHistoryError = Fetcher.ErrorWrapper<
| {
status: 400;
@@ -322,6 +425,7 @@ export type GetMigrationHistoryError = Fetcher.ErrorWrapper<

export type GetMigrationHistoryVariables = {
pathParams: GetMigrationHistoryPathParams;
queryParams?: GetMigrationHistoryQueryParams;
} & DataPlaneFetcherExtraProps;

export const getMigrationHistory = (variables: GetMigrationHistoryVariables, signal?: AbortSignal) =>
Expand All @@ -330,7 +434,7 @@ export const getMigrationHistory = (variables: GetMigrationHistoryVariables, sig
GetMigrationHistoryError,
undefined,
{},
{},
GetMigrationHistoryQueryParams,
GetMigrationHistoryPathParams
>({
url: '/db/{dbBranchName}/migrations/history',
@@ -5049,6 +5153,8 @@ export const operationsByTag = {
migrations: {
applyMigration,
startMigration,
completeMigration,
rollbackMigration,
adaptTable,
adaptAllTables,
getBranchMigrationJobStatus,
7 changes: 7 additions & 0 deletions packages/client/src/api/dataPlaneParameters.ts
@@ -11,6 +11,13 @@ export type TableNameParam = Schemas.TableName;

export type MigrationJobIDParam = Schemas.MigrationJobID;

/**
* @format date-time
*/
export type TimestampCursorParam = string;

export type LimitParam = Schemas.PaginationPageSize;

export type DBNameParam = Schemas.DBName;

export type MigrationRequestNumberParam = Schemas.MigrationRequestNumber;