diff --git a/sdks/python/apache_beam/yaml/examples/testing/examples_test.py b/sdks/python/apache_beam/yaml/examples/testing/examples_test.py
index 4f0516a1ea93..a4b8c8c4bdd5 100644
--- a/sdks/python/apache_beam/yaml/examples/testing/examples_test.py
+++ b/sdks/python/apache_beam/yaml/examples/testing/examples_test.py
@@ -679,7 +679,8 @@ def _kafka_test_preprocessor(
     'test_anomaly_scoring_yaml',
     'test_wordCountInclude_yaml',
     'test_wordCountImport_yaml',
-    'test_iceberg_to_alloydb_yaml'
+    'test_iceberg_to_alloydb_yaml',
+    'test_bigquery_write_yaml'
 ])
 def _io_write_test_preprocessor(
     test_spec: dict, expected: List[str], env: TestEnvironment):
diff --git a/sdks/python/apache_beam/yaml/examples/transforms/io/bigquery_write.yaml b/sdks/python/apache_beam/yaml/examples/transforms/io/bigquery_write.yaml
new file mode 100644
index 000000000000..bc3fd50f202f
--- /dev/null
+++ b/sdks/python/apache_beam/yaml/examples/transforms/io/bigquery_write.yaml
@@ -0,0 +1,59 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This pipeline demonstrates a simple write to BigQuery using Dynamic Destinations
+# (see https://cloud.google.com/dataflow/docs/guides/managed-io#dynamic-destinations).
+#
+# Prerequisites:
+# - A GCP project
+# - A BigQuery dataset
+# - Google Cloud credentials set up
+#
+# Usage:
+#   python -m apache_beam.yaml.main --yaml_pipeline_file=<PATH_TO_YAML> \
+#     --project=<PROJECT> \
+#     --temp_location=gs://<BUCKET>/temp
+
+pipeline:
+  type: chain
+  transforms:
+    - type: Create
+      name: CreateSampleData
+      config:
+        elements:
+          - {id: 1, name: "Alice", country: "CN"}
+          - {id: 2, name: "Bob", country: "UK"}
+          - {id: 3, name: "Charlie", country: "US"}
+    - type: WriteToBigQuery
+      name: WriteToTable
+      config:
+        table: "<PROJECT>:<DATASET>.users_{country}"
+        create_disposition: CREATE_IF_NEEDED
+        write_disposition: WRITE_APPEND
+        num_streams: 1
+
+options:
+  temp_location: "gs://<BUCKET>/temp"
+
+# Expected:
+#  Row(id=1, name='Alice', country='CN')
+#  Row(id=2, name='Bob', country='UK')
+#  Row(id=3, name='Charlie', country='US')
+
+
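
Note on the first hunk: registering 'test_bigquery_write_yaml' with _io_write_test_preprocessor means the example is rewritten before it runs in the test suite, so the expected Row(...) lines can be checked without a real BigQuery table. The sketch below is only an illustration of that pattern, assuming the preprocessor behaves like the other registered IO-write preprocessors; the helper name _strip_bigquery_write and its body are hypothetical, not the code in examples_test.py.

from typing import List


def _strip_bigquery_write(test_spec: dict, expected: List[str]) -> dict:
  """Illustrative stand-in for an IO-write test preprocessor.

  Replaces any WriteTo* transform in the parsed YAML spec with LogForTesting
  so the pipeline's output rows can be asserted locally instead of being
  written to BigQuery.
  """
  pipeline = test_spec.get('pipeline', {})
  for transform in pipeline.get('transforms', []):
    if transform.get('type', '').startswith('WriteTo'):
      transform['type'] = 'LogForTesting'
      # Drop sink-specific settings such as table and dispositions.
      transform.pop('config', None)
  return test_spec

The real preprocessor also receives an env: TestEnvironment argument (visible in the hunk's context lines); it is omitted here for brevity.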