Regenerate bigquerystorage client #4730

Merged 1 commit on Mar 23, 2019
google/cloud/bigquery/storage/v1beta1/avro.proto

@@ -21,6 +21,7 @@ option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage";
 option java_outer_classname = "AvroProto";
 option java_package = "com.google.cloud.bigquery.storage.v1beta1";

+
 // Avro schema.
 message AvroSchema {
   // Json serialized schema, as described at
google/cloud/bigquery/storage/v1beta1/read_options.proto

@@ -20,6 +20,7 @@ package google.cloud.bigquery.storage.v1beta1;
 option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage";
 option java_package = "com.google.cloud.bigquery.storage.v1beta1";

+
 // Options dictating how we read a table.
 message TableReadOptions {
   // Optional. Names of the fields in the table that should be read. If empty,
google/cloud/bigquery/storage/v1beta1/storage.proto

@@ -17,6 +17,8 @@ syntax = "proto3";

 package google.cloud.bigquery.storage.v1beta1;

+import "google/api/annotations.proto";
+import "google/api/resource.proto";
 import "google/cloud/bigquery/storage/v1beta1/avro.proto";
 import "google/cloud/bigquery/storage/v1beta1/read_options.proto";
 import "google/cloud/bigquery/storage/v1beta1/table_reference.proto";
@@ -26,6 +28,7 @@ import "google/protobuf/timestamp.proto";
 option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage";
 option java_package = "com.google.cloud.bigquery.storage.v1beta1";

+
 // BigQuery storage API.
 //
 // The BigQuery storage API can be used to read data stored in BigQuery.
@@ -42,7 +45,16 @@ service BigQueryStorage {
   //
   // Read sessions automatically expire 24 hours after they are created and do
   // not require manual clean-up by the caller.
-  rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) {}
+  rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) {
+    option (google.api.http) = {
+      post: "/v1beta1/{table_reference.project_id=projects/*}"
+      body: "*"
+      additional_bindings {
+        post: "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}"
+        body: "*"
+      }
+    };
+  }
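The binding above only adds a REST mapping for CreateReadSession (a POST on the project or dataset resource); the gRPC surface is unchanged. For reference, a minimal Go sketch of the call using the stubs generated from this proto: the import path comes from the go_package option above, while the client construction, the Parent field, and the example table are assumptions to check against the generated code.

package example

import (
	"context"
	"fmt"

	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// createSession opens a read session for a table. The client is assumed to be
// built with storagepb.NewBigQueryStorageClient over an authenticated
// *grpc.ClientConn to bigquerystorage.googleapis.com:443.
func createSession(ctx context.Context, client storagepb.BigQueryStorageClient) (*storagepb.ReadSession, error) {
	req := &storagepb.CreateReadSessionRequest{
		TableReference: &storagepb.TableReference{
			ProjectId: "bigquery-public-data", // example table, swap in your own
			DatasetId: "usa_names",
			TableId:   "usa_1910_current",
		},
		Parent:           "projects/my-project", // assumed field name for the billing project
		RequestedStreams: 4,                     // ask for up to 4 parallel streams
	}
	session, err := client.CreateReadSession(ctx, req)
	if err != nil {
		return nil, err
	}
	fmt.Printf("session %s with %d streams\n", session.Name, len(session.Streams))
	return session, nil
}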

   // Reads rows from the table in the format prescribed by the read session.
   // Each response contains one or more table rows, up to a maximum of 10 MiB
@@ -53,13 +65,21 @@ service BigQueryStorage {
   // estimated total number of rows in the read stream. This number is computed
   // based on the total table size and the number of active streams in the read
   // session, and may change as other streams continue to read data.
-  rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) {}
+  rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) {
+    option (google.api.http) = {
+      get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}"
+    };
+  }
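ReadRows is the only server-streaming method in this service, and its new binding is a GET on the stream resource. A minimal Go sketch of draining one stream, assuming the generated v1beta1 stubs and Avro output; the StreamPosition and AvroRows field names should be verified against the generated code.

package example

import (
	"context"
	"io"

	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// readStream drains a single stream of a read session and returns the number
// of row blocks received.
func readStream(ctx context.Context, client storagepb.BigQueryStorageClient, stream *storagepb.Stream) (int, error) {
	rows, err := client.ReadRows(ctx, &storagepb.ReadRowsRequest{
		ReadPosition: &storagepb.StreamPosition{
			Stream: stream, // read from the beginning; Offset defaults to 0
		},
	})
	if err != nil {
		return 0, err
	}
	blocks := 0
	for {
		resp, err := rows.Recv() // each response carries at most ~10 MiB of rows
		if err == io.EOF {
			return blocks, nil // server finished the stream
		}
		if err != nil {
			return blocks, err
		}
		_ = resp.GetAvroRows().GetSerializedBinaryRows() // decode with an Avro reader and the session's AvroSchema
		blocks++
	}
}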

   // Creates additional streams for a ReadSession. This API can be used to
   // dynamically adjust the parallelism of a batch processing task upwards by
   // adding additional workers.
-  rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest)
-      returns (BatchCreateReadSessionStreamsResponse) {}
+  rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) returns (BatchCreateReadSessionStreamsResponse) {
+    option (google.api.http) = {
+      post: "/v1beta1/{session.name=projects/*/sessions/*}"
+      body: "*"
+    };
+  }
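A short Go sketch of requesting extra streams for an existing session when more workers become available, again assuming the generated v1beta1 stubs; the Session and RequestedStreams field names follow the request message in this file but are worth double-checking.

package example

import (
	"context"

	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// addStreams asks the service for n additional streams on an existing session
// and returns the new streams so each can be handed to a separate worker.
func addStreams(ctx context.Context, client storagepb.BigQueryStorageClient, session *storagepb.ReadSession, n int32) ([]*storagepb.Stream, error) {
	resp, err := client.BatchCreateReadSessionStreams(ctx, &storagepb.BatchCreateReadSessionStreamsRequest{
		Session:          session,
		RequestedStreams: n,
	})
	if err != nil {
		return nil, err
	}
	return resp.Streams, nil
}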

   // Triggers the graceful termination of a single stream in a ReadSession. This
   // API can be used to dynamically adjust the parallelism of a batch processing
@@ -75,7 +95,12 @@ service BigQueryStorage {
   // This method will return an error if there are no other live streams
   // in the Session, or if SplitReadStream() has been called on the given
   // Stream.
-  rpc FinalizeStream(FinalizeStreamRequest) returns (google.protobuf.Empty) {}
+  rpc FinalizeStream(FinalizeStreamRequest) returns (google.protobuf.Empty) {
+    option (google.api.http) = {
+      post: "/v1beta1/{stream.name=projects/*/streams/*}"
+      body: "*"
+    };
+  }
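A Go sketch of finalizing a stream when scaling a worker pool down, assuming the generated v1beta1 stubs; the returned google.protobuf.Empty is discarded.

package example

import (
	"context"

	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// finalizeStream asks the service to gracefully terminate one stream. Per the
// doc comment above, it returns an error if no other live streams remain in
// the session or if SplitReadStream() has already been called on this stream.
func finalizeStream(ctx context.Context, client storagepb.BigQueryStorageClient, stream *storagepb.Stream) error {
	_, err := client.FinalizeStream(ctx, &storagepb.FinalizeStreamRequest{
		Stream: stream,
	})
	return err
}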

   // Splits a given read stream into two Streams. These streams are referred to
   // as the primary and the residual of the split. The original stream can still
@@ -90,8 +115,11 @@ service BigQueryStorage {
   // completion.
   //
   // This method is guaranteed to be idempotent.
-  rpc SplitReadStream(SplitReadStreamRequest)
-      returns (SplitReadStreamResponse) {}
+  rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) {
+    option (google.api.http) = {
+      get: "/v1beta1/{original_stream.name=projects/*/streams/*}"
+    };
+  }
 }
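Finally, a Go sketch of splitting a stream into its primary and residual halves so two workers can share the remaining rows, assuming the generated v1beta1 stubs; PrimaryStream and RemainderStream are assumed response field names.

package example

import (
	"context"

	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// splitStream splits one read stream into a primary and a residual stream.
// The call is documented above as idempotent, so retrying it is safe.
func splitStream(ctx context.Context, client storagepb.BigQueryStorageClient, stream *storagepb.Stream) (primary, residual *storagepb.Stream, err error) {
	resp, err := client.SplitReadStream(ctx, &storagepb.SplitReadStreamRequest{
		OriginalStream: stream,
	})
	if err != nil {
		return nil, nil, err
	}
	return resp.PrimaryStream, resp.RemainderStream, nil
}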

 // Information about a single data stream within a read session.