@@ -240,6 +240,73 @@ function loadLocalFile(datasetId, tableId, filename, projectId) {
240240 // [END bigquery_load_from_file]
241241}
242242
/**
 * Loads a sample Parquet file from Google Cloud Storage into a BigQuery table.
 *
 * @param {string} datasetId ID of the destination dataset.
 * @param {string} tableId ID of the destination table.
 * @param {string} projectId Google Cloud project to bill and target.
 */
function loadParquetFromGCS(datasetId, tableId, projectId) {
  // [START bigquery_load_table_gcs_parquet]
  // Imports the Google Cloud client libraries
  const BigQuery = require('@google-cloud/bigquery');
  const Storage = require('@google-cloud/storage');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const projectId = "your-project-id";
  // const datasetId = "my_dataset";
  // const tableId = "my_table";

  /**
   * This sample loads the Parquet file at
   * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.parquet
   *
   * TODO(developer): Replace the following lines with the path to your file.
   */
  const bucketName = 'cloud-samples-data';
  const filename = 'bigquery/us-states/us-states.parquet';

  // Instantiates clients
  const bigquery = new BigQuery({
    projectId: projectId,
  });

  const storage = new Storage({
    projectId: projectId,
  });

  // Configure the load job. For full list of options, see:
  // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load
  const metadata = {
    sourceFormat: 'PARQUET',
    schema: {
      fields: [
        {name: 'name', type: 'STRING'},
        {name: 'post_abbr', type: 'STRING'},
      ],
    },
  };

  // Loads data from a Google Cloud Storage file into the table.
  // load() waits for the job to finish before resolving.
  bigquery
    .dataset(datasetId)
    .table(tableId)
    .load(storage.bucket(bucketName).file(filename), metadata)
    .then(results => {
      const job = results[0];

      // Check the job's status for errors BEFORE reporting success,
      // so a failed load is never announced as completed.
      // (The previous version also called `assert`, which was never
      // required and threw a ReferenceError inside this handler.)
      const errors = job.status.errors;
      if (errors && errors.length > 0) {
        throw errors;
      }

      console.log(`Job ${job.id} completed.`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
  // [END bigquery_load_table_gcs_parquet]
}
309+
243310function loadCSVFromGCS ( datasetId , tableId , projectId ) {
244311 // [START bigquery_load_table_gcs_csv]
245312 // Imports the Google Cloud client libraries
@@ -667,6 +734,14 @@ require(`yargs`)
667734 ) ;
668735 }
669736 )
  // Registers the `load-gcs-parquet` CLI command; positional arguments are
  // forwarded to loadParquetFromGCS in its (datasetId, tableId, projectId) order.
  .command(
    `load-gcs-parquet <projectId> <datasetId> <tableId>`,
    `Loads sample Parquet data from a Google Cloud Storage file into a table.`,
    {},
    opts => {
      loadParquetFromGCS(opts.datasetId, opts.tableId, opts.projectId);
    }
  )
670745 . command (
671746 `load-gcs-csv <projectId> <datasetId> <tableId>` ,
672747 `Loads sample CSV data from a Google Cloud Storage file into a table.` ,
0 commit comments