26 changes: 23 additions & 3 deletions src/batch-transaction.ts
@@ -18,7 +18,7 @@ import {PreciseDate} from '@google-cloud/precise-date';
import {promisifyAll} from '@google-cloud/promisify';
import * as extend from 'extend';
import * as is from 'is';
import {ExecuteSqlRequest, Snapshot} from './transaction';
import {ReadRequest, ExecuteSqlRequest, Snapshot} from './transaction';
import {google} from '../protos/protos';
import {Session, Database} from '.';
import {
@@ -35,6 +35,16 @@ export interface TransactionIdentifier {
timestamp?: google.protobuf.ITimestamp;
}

export type CreateReadPartitionsResponse = [
google.spanner.v1.IPartitionReadRequest,
google.spanner.v1.IPartitionResponse,
];

export type CreateReadPartitionsCallback = ResourceCallback<
google.spanner.v1.IPartitionReadRequest,
google.spanner.v1.IPartitionResponse
>;

export type CreateQueryPartitionsResponse = [
google.spanner.v1.IPartitionQueryRequest,
google.spanner.v1.IPartitionResponse,
@@ -284,7 +294,17 @@ class BatchTransaction extends Snapshot {
* @param {CreateReadPartitionsCallback} [callback] Callback function.
* @returns {Promise<CreateReadPartitionsResponse>}
*/
createReadPartitions(options, callback) {
createReadPartitions(
options: ReadRequest,
): Promise<CreateReadPartitionsResponse>;
createReadPartitions(
options: ReadRequest,
callback: CreateReadPartitionsCallback,
): void;
createReadPartitions(
options: ReadRequest,
cb?: CreateReadPartitionsCallback,
): void | Promise<CreateReadPartitionsResponse> {
const traceConfig: traceConfig = {
opts: this._observabilityOptions,
dbName: this.getDBName(),
@@ -321,7 +341,7 @@
}

span.end();
callback(err, partitions, resp);
cb!(err, partitions, resp);
},
);
},
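For reference, here is a minimal usage sketch of the two calling styles these overloads expose. The instance, database, table, columns, and keys are hypothetical and not part of this change; only the `createReadPartitions` calls reflect the new surface.

```ts
import {Spanner} from '@google-cloud/spanner';

// Minimal sketch of the two calling styles enabled by the overloads above.
// The instance, database, table, columns, and keys are made up for illustration.
async function createReadPartitionsExample(): Promise<void> {
  const spanner = new Spanner();
  const database = spanner.instance('my-instance').database('my-database');
  const [transaction] = await database.createBatchTransaction();

  // Promise style: resolves with [partitions, partitionResponse].
  const [partitions] = await transaction.createReadPartitions({
    table: 'Singers',
    columns: ['SingerId', 'FirstName'],
    keys: ['1', '2'],
  });
  console.log('created read partitions:', partitions);

  // Callback style: same request, node-style callback instead of a promise.
  transaction.createReadPartitions(
    {table: 'Singers', columns: ['SingerId', 'FirstName'], keys: ['1', '2']},
    (err, callbackPartitions, resp) => {
      if (err) {
        console.error(err);
        return;
      }
      console.log('created read partitions:', callbackPartitions, resp);
    },
  );
}
```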
30 changes: 29 additions & 1 deletion test/batch-transaction.ts
@@ -371,7 +371,7 @@ describe('BatchTransaction', () => {
directedReadOptions: fakeDirectedReadOptionsForRequest,
};

it('should make the correct request', () => {
it('should make the correct request using callback', () => {
const fakeKeySet = {};
const expectedQuery = {
table: QUERY.table,
@@ -398,6 +398,34 @@
Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}),
);
});

it('should make the correct request using await', async () => {
const fakeKeySet = {};
const expectedQuery = {
table: QUERY.table,
keySet: fakeKeySet,
dataBoostEnabled: true,
directedReadOptions: fakeDirectedReadOptionsForRequest,
};

const stub = sandbox.stub(batchTransaction, 'createPartitions_');

(sandbox.stub(FakeTransaction, 'encodeKeySet') as sinon.SinonStub)
.withArgs(QUERY)
.returns(fakeKeySet);

await batchTransaction.createReadPartitions(QUERY);

const {client, method, reqOpts, gaxOpts, headers} = stub.lastCall.args[0];
assert.strictEqual(client, 'SpannerClient');
assert.strictEqual(method, 'partitionRead');
assert.deepStrictEqual(reqOpts, expectedQuery);
assert.strictEqual(gaxOpts, GAX_OPTS);
assert.deepStrictEqual(
headers,
Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}),
);
});
});

describe('execute', () => {
15 changes: 13 additions & 2 deletions test/mockserver/mockspanner.ts
@@ -268,6 +268,7 @@ export class MockSpanner {

this.read = this.read.bind(this);
this.streamingRead = this.streamingRead.bind(this);
this.partitionRead = this.partitionRead.bind(this);
}

/**
@@ -993,11 +994,21 @@
}

partitionRead(
call: grpc.ServerUnaryCall<protobuf.PartitionReadRequest, {}>,
call: grpc.ServerUnaryCall<
protobuf.PartitionReadRequest,
protobuf.PartitionResponse
>,
callback: protobuf.Spanner.PartitionReadCallback,
) {
this.pushRequest(call.request!, call.metadata);
callback(createUnimplementedError('PartitionQuery is not yet implemented'));
this.simulateExecutionTime(this.partitionRead.name)
.then(() => {
const response = protobuf.PartitionResponse.create({
partitions: [{partitionToken: Buffer.from('mock-token')}],
});
callback(null, response);
})
.catch(err => callback(err));
}

private _updateTransaction(
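Because partitionRead now routes through simulateExecutionTime, a test could also inject failures for it the same way other mocked methods are exercised. A hedged sketch, assuming the mock's existing setExecutionTime and SimulatedExecutionTime.ofError helpers (not part of this diff) keep their current shape; the error code and message are illustrative.

```ts
import {grpc} from 'google-gax';
import * as mock from './mockserver/mockspanner';

// Sketch: force the mocked partitionRead to fail, assuming the mock's existing
// setExecutionTime / SimulatedExecutionTime.ofError helpers keep their current
// shape. The error code and message are illustrative.
function injectPartitionReadError(spannerMock: mock.MockSpanner): void {
  spannerMock.setExecutionTime(
    spannerMock.partitionRead,
    mock.SimulatedExecutionTime.ofError({
      code: grpc.status.UNAVAILABLE,
      message: 'partitionRead temporarily unavailable (injected by test)',
    } as mock.MockError),
  );
}
```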
17 changes: 17 additions & 0 deletions test/spanner.ts
@@ -3651,6 +3651,23 @@ describe('Spanner with mock server', () => {
});
});

describe('createReadPartitions', () => {
it('should create set of read partitions', async () => {
const database = newTestDatabase({min: 0, incStep: 1});
const query = {
table: 'abc',
keys: ['a', 'b'],
ranges: [{}, {}],
gaxOptions: {},
dataBoostEnabled: true,
};
const [transaction] = await database.createBatchTransaction();
const [readPartitions] = await transaction.createReadPartitions(query);
assert.strictEqual(Object.keys(readPartitions).length, 1);
assert.strictEqual(readPartitions[0].table, 'abc');
});
});

describe('createQueryPartitions', () => {
it('should create set of query partitions', async () => {
const database = newTestDatabase({min: 0, incStep: 1});
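For completeness, a sketch of how the partitions created by this test could be consumed, assuming the existing BatchTransaction#execute(partition) helper and the newTestDatabase helper used above; this is illustrative and not part of the PR.

```ts
// Sketch: consume the partitions created above with the existing
// BatchTransaction#execute(partition) helper. At runtime the first tuple
// element resolves to the list of partition objects, as the assertions in the
// test above rely on; the table, keys, and ranges mirror the test's values.
async function readAllPartitions() {
  const database = newTestDatabase({min: 0, incStep: 1});
  const [transaction] = await database.createBatchTransaction();
  const [partitions] = await transaction.createReadPartitions({
    table: 'abc',
    keys: ['a', 'b'],
    ranges: [{}, {}],
    dataBoostEnabled: true,
  });
  for (const partition of partitions) {
    const [rows] = await transaction.execute(partition);
    console.log(`partition returned ${rows.length} rows`);
  }
  await transaction.close();
  await database.close();
}
```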