
Commit 2a46ec3

fix: update JsonStreamWriterBuilder comment and update sample to use the latest schema retrieval support (#1902)
* fix: update sample to use the latest schema retrieval support
* 🦉 Updates from OwlBot post-processor (see https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md)

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 065cc4f commit 2a46ec3

File tree: 2 files changed, +13 −10 lines

google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java

Lines changed: 10 additions & 0 deletions
@@ -320,6 +320,11 @@ private void setStreamWriterSettings(
  * newBuilder that constructs a JsonStreamWriter builder with BigQuery client being initialized by
  * StreamWriter by default.
  *
+ * <p>The table schema passed in will be updated automatically when there is a schema update
+ * event. When used for Writer creation, it should be the latest schema. So when you are trying to
+ * reuse a stream, you should use Builder newBuilder( String streamOrTableName,
+ * BigQueryWriteClient client) instead, so the created Writer will be based on a fresh schema.
+ *
  * @param streamOrTableName name of the stream that must follow
  *     "projects/[^/]+/datasets/[^/]+/tables/[^/]+/streams/[^/]+" or table name
  *     "projects/[^/]+/datasets/[^/]+/tables/[^/]+"
@@ -336,6 +341,11 @@ public static Builder newBuilder(String streamOrTableName, TableSchema tableSche
 /**
  * newBuilder that constructs a JsonStreamWriter builder.
  *
+ * <p>The table schema passed in will be updated automatically when there is a schema update
+ * event. When used for Writer creation, it should be the latest schema. So when you are trying to
+ * reuse a stream, you should use Builder newBuilder( String streamOrTableName,
+ * BigQueryWriteClient client) instead, so the created Writer will be based on a fresh schema.
+ *
  * @param streamOrTableName name of the stream that must follow
  *     "projects/[^/]+/datasets/[^/]+/tables/[^/]+/streams/[^/]+"
  * @param tableSchema The schema of the table when the stream was created, which is passed back

samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java

Lines changed: 3 additions & 10 deletions
@@ -24,16 +24,14 @@
 import com.google.cloud.bigquery.BigQuery;
 import com.google.cloud.bigquery.BigQueryOptions;
 import com.google.cloud.bigquery.QueryJobConfiguration;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.Table;
 import com.google.cloud.bigquery.TableResult;
 import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
 import com.google.cloud.bigquery.storage.v1.Exceptions;
 import com.google.cloud.bigquery.storage.v1.Exceptions.AppendSerializtionError;
 import com.google.cloud.bigquery.storage.v1.Exceptions.StorageException;
 import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
 import com.google.cloud.bigquery.storage.v1.TableName;
-import com.google.cloud.bigquery.storage.v1.TableSchema;
 import com.google.common.collect.ImmutableList;
 import com.google.common.util.concurrent.MoreExecutors;
 import com.google.protobuf.Descriptors.DescriptorValidationException;
@@ -146,17 +144,12 @@ private static class DataWriter {
 
   public void initialize(TableName parentTable)
       throws DescriptorValidationException, IOException, InterruptedException {
-    // Retrive table schema information.
-    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-    Table table = bigquery.getTable(parentTable.getDataset(), parentTable.getTable());
-    Schema schema = table.getDefinition().getSchema();
-    TableSchema tableSchema = BqToBqStorageSchemaConverter.convertTableSchema(schema);
-
     // Use the JSON stream writer to send records in JSON format. Specify the table name to write
     // to the default stream.
     // For more information about JsonStreamWriter, see:
     // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.html
-    streamWriter = JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build();
+    streamWriter =
+        JsonStreamWriter.newBuilder(parentTable.toString(), BigQueryWriteClient.create()).build();
   }
 
   public void append(AppendContext appendContext)
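The simplified flow the updated sample now follows can be summarized with the minimal, self-contained sketch below: the writer is built for the table's default stream with a BigQueryWriteClient, so the manual schema lookup and BqToBqStorageSchemaConverter conversion are no longer needed. Project, dataset, table, and column names here are placeholders, not values from the sample.

```java
import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1.TableName;
import org.json.JSONArray;
import org.json.JSONObject;

public class DefaultStreamSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder identifiers; substitute a real project, dataset, and table.
    TableName parentTable = TableName.of("my-project", "my_dataset", "my_table");

    // The schema is retrieved through the write client; no Schema/TableSchema plumbing.
    try (JsonStreamWriter writer =
        JsonStreamWriter.newBuilder(parentTable.toString(), BigQueryWriteClient.create())
            .build()) {
      JSONArray rows = new JSONArray();
      rows.put(new JSONObject().put("col1", "value")); // "col1" is a hypothetical column
      ApiFuture<AppendRowsResponse> future = writer.append(rows);
      future.get(); // wait for the append to be acknowledged
    }
  }
}
```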
