// Node builtins first, then third-party packages.
const cp = require('child_process');
const { randomUUID } = require('crypto');
const path = require('path');
const { assert } = require('chai');
const { BigQuery } = require('@google-cloud/bigquery');
const { Storage } = require('@google-cloud/storage');
const uuid = require('uuid');

25+ const execSync = cmd => cp . execSync ( cmd , { encoding : 'utf-8' } ) ;
26+
2527const storage = new Storage ( ) ;
26- const exec = async cmd => {
27- const res = await execa . shell ( cmd ) ;
28- assert . isEmpty ( res . stderr ) ;
29- return res . stdout ;
30- } ;
3128const generateUuid = ( ) => `gcloud-tests-${ uuid . v4 ( ) } ` . replace ( / - / gi, '_' ) ;
3229
3330const datasetId = generateUuid ( ) ;
@@ -82,10 +79,10 @@ describe('Tables', () => {
8279 } ) ;
8380
8481 it ( `should create a table` , async ( ) => {
85- const output = await exec (
82+ const output = execSync (
8683 `node createTable.js ${ datasetId } ${ tableId } "${ schema } "`
8784 ) ;
88- assert . strictEqual ( output , `Table ${ tableId } created.` ) ;
85+ assert . include ( output , `Table ${ tableId } created.` ) ;
8986 const [ exists ] = await bigquery
9087 . dataset ( datasetId )
9188 . table ( tableId )
@@ -94,13 +91,13 @@ describe('Tables', () => {
9491 } ) ;
9592
9693 it ( `should list tables` , async ( ) => {
97- const output = await exec ( `node listTables.js ${ datasetId } ` ) ;
94+ const output = execSync ( `node listTables.js ${ datasetId } ` ) ;
9895 assert . match ( output , / T a b l e s : / ) ;
9996 assert . match ( output , new RegExp ( tableId ) ) ;
10097 } ) ;
10198
10299 it ( `should load a local CSV file` , async ( ) => {
103- const output = await exec (
100+ const output = execSync (
104101 `node loadLocalFile.js ${ datasetId } ${ tableId } ${ localFilePath } `
105102 ) ;
106103 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -112,15 +109,15 @@ describe('Tables', () => {
112109 } ) ;
113110
114111 it ( `should browse table rows` , async ( ) => {
115- const output = await exec ( `node browseRows.js ${ datasetId } ${ tableId } ` ) ;
116- assert . strictEqual (
112+ const output = execSync ( `node browseRows.js ${ datasetId } ${ tableId } ` ) ;
113+ assert . include (
117114 output ,
118115 `Rows:\n{ Name: 'Gandalf', Age: 2000, Weight: 140, IsMagic: true }`
119116 ) ;
120117 } ) ;
121118
122119 it ( `should extract a table to GCS` , async ( ) => {
123- const output = await exec (
120+ const output = execSync (
124121 `node extractTableToGCS.js ${ datasetId } ${ tableId } ${ bucketName } ${ exportFileName } `
125122 ) ;
126123
@@ -134,9 +131,7 @@ describe('Tables', () => {
134131
135132 it ( `should load a GCS ORC file` , async ( ) => {
136133 const tableId = generateUuid ( ) ;
137- const output = await exec (
138- `node loadTableGCSORC.js ${ datasetId } ${ tableId } `
139- ) ;
134+ const output = execSync ( `node loadTableGCSORC.js ${ datasetId } ${ tableId } ` ) ;
140135 assert . match ( output , / c o m p l e t e d \. / ) ;
141136 const [ rows ] = await bigquery
142137 . dataset ( datasetId )
@@ -147,7 +142,7 @@ describe('Tables', () => {
147142
148143 it ( `should load a GCS Parquet file` , async ( ) => {
149144 const tableId = generateUuid ( ) ;
150- const output = await exec (
145+ const output = execSync (
151146 `node loadTableGCSParquet.js ${ datasetId } ${ tableId } `
152147 ) ;
153148 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -160,7 +155,7 @@ describe('Tables', () => {
160155
161156 it ( `should load a GCS CSV file with explicit schema` , async ( ) => {
162157 const tableId = generateUuid ( ) ;
163- const output = await exec ( `node loadCSVFromGCS.js ${ datasetId } ${ tableId } ` ) ;
158+ const output = execSync ( `node loadCSVFromGCS.js ${ datasetId } ${ tableId } ` ) ;
164159 assert . match ( output , / c o m p l e t e d \. / ) ;
165160 const [ rows ] = await bigquery
166161 . dataset ( datasetId )
@@ -171,9 +166,7 @@ describe('Tables', () => {
171166
172167 it ( `should load a GCS JSON file with explicit schema` , async ( ) => {
173168 const tableId = generateUuid ( ) ;
174- const output = await exec (
175- `node loadJSONFromGCS.js ${ datasetId } ${ tableId } `
176- ) ;
169+ const output = execSync ( `node loadJSONFromGCS.js ${ datasetId } ${ tableId } ` ) ;
177170 assert . match ( output , / c o m p l e t e d \. / ) ;
178171 const [ rows ] = await bigquery
179172 . dataset ( datasetId )
@@ -184,7 +177,7 @@ describe('Tables', () => {
184177
185178 it ( `should load a GCS CSV file with autodetected schema` , async ( ) => {
186179 const tableId = generateUuid ( ) ;
187- const output = await exec (
180+ const output = execSync (
188181 `node loadCSVFromGCSAutodetect.js ${ datasetId } ${ tableId } `
189182 ) ;
190183 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -197,7 +190,7 @@ describe('Tables', () => {
197190
198191 it ( `should load a GCS JSON file with autodetected schema` , async ( ) => {
199192 const tableId = generateUuid ( ) ;
200- const output = await exec (
193+ const output = execSync (
201194 `node loadJSONFromGCSAutodetect.js ${ datasetId } ${ tableId } `
202195 ) ;
203196 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -210,7 +203,7 @@ describe('Tables', () => {
210203
211204 it ( `should load a GCS CSV file truncate table` , async ( ) => {
212205 const tableId = generateUuid ( ) ;
213- const output = await exec (
206+ const output = execSync (
214207 `node loadCSVFromGCSTruncate.js ${ datasetId } ${ tableId } `
215208 ) ;
216209 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -223,7 +216,7 @@ describe('Tables', () => {
223216
224217 it ( `should load a GCS JSON file truncate table` , async ( ) => {
225218 const tableId = generateUuid ( ) ;
226- const output = await exec (
219+ const output = execSync (
227220 `node loadJSONFromGCSTruncate.js ${ datasetId } ${ tableId } `
228221 ) ;
229222 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -236,7 +229,7 @@ describe('Tables', () => {
236229
237230 it ( `should load a GCS parquet file truncate table` , async ( ) => {
238231 const tableId = generateUuid ( ) ;
239- const output = await exec (
232+ const output = execSync (
240233 `node loadParquetFromGCSTruncate.js ${ datasetId } ${ tableId } `
241234 ) ;
242235 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -249,7 +242,7 @@ describe('Tables', () => {
249242
250243 it ( `should load a GCS ORC file truncate table` , async ( ) => {
251244 const tableId = generateUuid ( ) ;
252- const output = await exec (
245+ const output = execSync (
253246 `node loadOrcFromGCSTruncate.js ${ datasetId } ${ tableId } `
254247 ) ;
255248 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -261,7 +254,7 @@ describe('Tables', () => {
261254 } ) ;
262255
263256 it ( `should copy a table` , async ( ) => {
264- const output = await exec (
257+ const output = execSync (
265258 `node copyTable.js ${ srcDatasetId } ${ srcTableId } ${ destDatasetId } ${ destTableId } `
266259 ) ;
267260 assert . match ( output , / c o m p l e t e d \. / ) ;
@@ -273,15 +266,15 @@ describe('Tables', () => {
273266 } ) ;
274267
275268 it ( `should insert rows` , async ( ) => {
276- const output = await exec (
269+ const output = execSync (
277270 `node insertRowsAsStream.js ${ datasetId } ${ tableId } `
278271 ) ;
279272 assert . match ( output , / I n s e r t e d 2 r o w s / ) ;
280273 } ) ;
281274
282275 it ( `should delete a table` , async ( ) => {
283- const output = await exec ( `node deleteTable.js ${ datasetId } ${ tableId } ` ) ;
284- assert . strictEqual ( output , `Table ${ tableId } deleted.` ) ;
276+ const output = execSync ( `node deleteTable.js ${ datasetId } ${ tableId } ` ) ;
277+ assert . include ( output , `Table ${ tableId } deleted.` ) ;
285278 const [ exists ] = await bigquery
286279 . dataset ( datasetId )
287280 . table ( tableId )
0 commit comments