diff --git a/.clang-format b/.clang-format deleted file mode 100644 index 7d6cf97e..00000000 --- a/.clang-format +++ /dev/null @@ -1,3 +0,0 @@ -Language: JavaScript -BasedOnStyle: Google -ColumnLimit: 80 \ No newline at end of file diff --git a/.cloud-repo-tools.json b/.cloud-repo-tools.json deleted file mode 100644 index ee6b5bda..00000000 --- a/.cloud-repo-tools.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "requiresKeyFile": true, - "requiresProjectId": true, - "product": "bigquery", - "client_reference_url": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest/", - "release_quality": "ga", - "samples": [ - { - "id": "dataset-create", - "name": "Dataset create", - "file": "createDataset.js", - "docs_link": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest/" - }, - { - "id": "dataset-delete", - "name": "Dataset delete", - "file": "deleteDataset.js", - "docs_link": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest/" - }, - { - "id": "dataset-list", - "name": "Dataset list", - "file": "listDatasets.js", - "docs_link": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest/" - }, - { - "id": "tables", - "name": "Tables", - "file": "tables.js", - "docs_link": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest/", - "usage": "node tables.js --help" - }, - { - "id": "queries", - "name": "Queries", - "file": "queries.js", - "docs_link": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest/", - "usage": "node queries.js --help" - } - ] -} diff --git a/.eslintignore b/.eslintignore index f0c7aead..9340ad9b 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,4 +1,6 @@ **/node_modules -src/**/doc/* +**/coverage +test/fixtures build/ docs/ +protos/ diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 00000000..78215349 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/.eslintrc.yml b/.eslintrc.yml deleted file mode 100644 index 73eeec27..00000000 --- a/.eslintrc.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- -extends: - - 'eslint:recommended' - - 'plugin:node/recommended' - - prettier -plugins: - - node - - prettier -rules: - prettier/prettier: error - block-scoped-var: error - eqeqeq: error - no-warning-comments: warn - no-var: error - prefer-const: error diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..33739cb7 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +*.ts text eol=lf +*.js text eol=lf +protos/* linguist-generated +**/api-extractor.json linguist-language=JSON-with-Comments diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 00000000..b1434427 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,3 @@ +docker: + image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + digest: sha256:06c970a44680229c1e8cefa701dbc93b80468ec4a34e6968475084e4ec1e2d7d diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 00000000..3a281cc9 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,19 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +docker: + image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + + +begin-after-commit-hash: 397c0bfd367a2427104f988d5329bc117caafd95 + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..6d82f1f7 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,12 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +# The yoshi-nodejs team is the default owner for nodejs repositories. +* @googleapis/yoshi-nodejs @googleapis/api-bigquery + +# The github automation team is the default owner for the auto-approve file. +.github/auto-approve.yml @googleapis/github-automation diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 0aad0573..af9cb7a3 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -8,13 +8,18 @@ Thanks for stopping by to let us know something could be better! **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. -Please run down the following list and make sure you've tried the usual "quick fixes": +1) Is this a client library issue or a product issue? +This is the client library for . We will only be able to assist with issues that pertain to the behaviors of this library. If the issue you're experiencing is due to the behavior of the product itself, please visit the [ Support page]() to reach the most relevant engineers. +2) Did someone already solve this? - Search the issues already opened: https://github.com/googleapis/nodejs-bigquery/issues - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js + - Search or ask on StackOverflow (engineers monitor these tags): http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js -If you are still having issues, please be sure to include as much information as possible: +3) Do you have a support contract? +Please create an issue in the [support console](https://cloud.google.com/support/) to ensure a timely response. + +If the support paths suggested above still do not result in a resolution, please provide the following details. #### Environment details diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 80975030..c378ea75 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,7 @@ -Fixes # (it's a good idea to open an issue first for discussion) - -- [ ] Tests and linter pass +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass - [ ] Code coverage does not decrease (if any source code was changed) - [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 00000000..a79ba66c --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,12 @@ +rules: +- author: "release-please[bot]" + title: "^chore: release" + changedFiles: + - "package\\.json$" + - "CHANGELOG\\.md$" + maxFiles: 3 +- author: "renovate-bot" + title: "^(fix\\(deps\\)|chore\\(deps\\)):" + changedFiles: + - "/package\\.json$" + maxFiles: 2 diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 00000000..c2fd6a00 --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,6 @@ +assign_issues: + - stephenplusplus + - steffnay +assign_prs: + - stephenplusplus + - steffnay diff --git a/.github/generated-files-bot.yml b/.github/generated-files-bot.yml new file mode 100644 index 00000000..7bb7ce54 --- /dev/null +++ b/.github/generated-files-bot.yml @@ -0,0 +1,16 @@ +generatedFiles: +- path: '.kokoro/**' + message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/CODEOWNERS' + message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' +- path: '.github/workflows/ci.yaml' + message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/generated-files-bot.+(yml|yaml)' + message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: 'README.md' + message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' +- path: 'samples/README.md' + message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' +ignoreAuthors: +- 'gcf-owl-bot[bot]' +- 'yoshi-automation' diff --git a/.github/release-please.yml b/.github/release-please.yml new file mode 100644 index 00000000..a1b41da3 --- /dev/null +++ b/.github/release-please.yml @@ -0,0 +1,2 @@ +handleGHRelease: true +releaseType: node diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..f033c0d2 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,57 @@ +on: + push: + branches: + - master + pull_request: +name: ci +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + node: [10, 12, 14, 15] + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node }} + - run: node --version + # The first installation step ensures that all of our production + # dependencies work on the given Node.js version, this helps us find + # dependencies that don't match our engines field: + - run: npm install --production --engine-strict --ignore-scripts --no-package-lock + # Clean up the production install, before installing dev/production: + - run: rm -rf node_modules + - run: npm install + - run: npm test + env: + MOCHA_THROW_DEPRECATION: false + windows: + runs-on: windows-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + with: + node-version: 14 + - run: npm install + - run: npm test + env: + MOCHA_THROW_DEPRECATION: false + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + with: + node-version: 14 + - run: npm install + - run: npm run lint + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + with: + node-version: 14 + - run: npm install + - run: npm run docs-test diff --git a/.gitignore b/.gitignore index 5ee54dbf..e869cd90 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,6 @@ system-test/secrets.js system-test/*key.json *.lock .DS_Store -google-cloud-logging-winston-*.tgz -google-cloud-logging-bunyan-*.tgz +__pycache__ .vscode package-lock.json diff --git a/.jsdoc.js b/.jsdoc.js index 408e377d..317fc3f0 100644 --- a/.jsdoc.js +++ b/.jsdoc.js @@ -1,18 +1,17 @@ -/*! - * Copyright 2018 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// 'use strict'; @@ -20,13 +19,14 @@ module.exports = { opts: { readme: './README.md', package: './package.json', - template: './node_modules/jsdoc-baseline', + template: './node_modules/jsdoc-fresh', recurse: true, verbose: true, destination: './docs/' }, plugins: [ - 'plugins/markdown' + 'plugins/markdown', + 'jsdoc-region-tag' ], source: { excludePattern: '(^|\\/|\\\\)[._]', @@ -36,10 +36,16 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2018 Google, LLC.', + copyright: 'Copyright 2019 Google, LLC.', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery', - theme: 'lumen' + theme: 'lumen', + default: { + "outputSourceFiles": false + } + }, + markdown: { + idInHeadings: true } }; diff --git a/.kokoro/.gitattributes b/.kokoro/.gitattributes new file mode 100644 index 00000000..87acd4f4 --- /dev/null +++ b/.kokoro/.gitattributes @@ -0,0 +1 @@ +* linguist-generated=true diff --git a/.kokoro/common.cfg b/.kokoro/common.cfg index 9256c217..9fd066ee 100644 --- a/.kokoro/common.cfg +++ b/.kokoro/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/.kokoro/continuous/node10/common.cfg b/.kokoro/continuous/node10/common.cfg index 9256c217..de7f471d 100644 --- a/.kokoro/continuous/node10/common.cfg +++ b/.kokoro/continuous/node10/common.cfg @@ -7,11 +7,21 @@ action { } } +# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} + # Download trampoline resources. gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/.kokoro/continuous/node10/system-test-grpcjs.cfg b/.kokoro/continuous/node10/system-test-grpcjs.cfg deleted file mode 100644 index 623103d0..00000000 --- a/.kokoro/continuous/node10/system-test-grpcjs.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery/.kokoro/system-test.sh" -} - -env_vars: { - key: "GOOGLE_CLOUD_USE_GRPC_JS" - value: "1" -} diff --git a/.kokoro/continuous/node10/test.cfg b/.kokoro/continuous/node10/test.cfg index e69de29b..468b8c71 100644 --- a/.kokoro/continuous/node10/test.cfg +++ b/.kokoro/continuous/node10/test.cfg @@ -0,0 +1,9 @@ +# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} diff --git a/.kokoro/continuous/node11/common.cfg b/.kokoro/continuous/node12/common.cfg similarity index 80% rename from .kokoro/continuous/node11/common.cfg rename to .kokoro/continuous/node12/common.cfg index 8f2c148f..5e26eaed 100644 --- a/.kokoro/continuous/node11/common.cfg +++ b/.kokoro/continuous/node12/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:11-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/.kokoro/continuous/node10/lint.cfg b/.kokoro/continuous/node12/lint.cfg similarity index 100% rename from .kokoro/continuous/node10/lint.cfg rename to .kokoro/continuous/node12/lint.cfg diff --git a/.kokoro/continuous/node10/samples-test.cfg b/.kokoro/continuous/node12/samples-test.cfg similarity index 100% rename from .kokoro/continuous/node10/samples-test.cfg rename to .kokoro/continuous/node12/samples-test.cfg diff --git a/.kokoro/continuous/node10/system-test.cfg b/.kokoro/continuous/node12/system-test.cfg similarity index 100% rename from .kokoro/continuous/node10/system-test.cfg rename to .kokoro/continuous/node12/system-test.cfg diff --git a/.kokoro/continuous/node11/test.cfg b/.kokoro/continuous/node12/test.cfg similarity index 100% rename from .kokoro/continuous/node11/test.cfg rename to .kokoro/continuous/node12/test.cfg diff --git a/.kokoro/continuous/node8/test.cfg b/.kokoro/continuous/node8/test.cfg index 468b8c71..e69de29b 100644 --- a/.kokoro/continuous/node8/test.cfg +++ b/.kokoro/continuous/node8/test.cfg @@ -1,9 +0,0 @@ -# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} diff --git a/.kokoro/docs.sh b/.kokoro/docs.sh index a4f31879..85901242 100755 --- a/.kokoro/docs.sh +++ b/.kokoro/docs.sh @@ -14,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -xeo pipefail +set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. diff --git a/.kokoro/lint.sh b/.kokoro/lint.sh index 7c2ea2a2..aef4866e 100755 --- a/.kokoro/lint.sh +++ b/.kokoro/lint.sh @@ -14,18 +14,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-set -xeo pipefail +set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. npm install # Install and link samples -cd samples/ -npm link ../ -npm install -cd .. +if [ -f samples/package.json ]; then + cd samples/ + npm link ../ + npm install + cd .. +fi npm run lint diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 00000000..deb2b199 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,76 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file is called in the early stage of `trampoline_v2.sh` to +# populate secrets needed for the CI builds. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + +# Populates requested secrets set in SECRET_MANAGER_KEYS + +# In Kokoro CI builds, we use the service account attached to the +# Kokoro VM. This means we need to setup auth on other CI systems. +# For local run, we just use the gcloud command for retrieving the +# secrets. + +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + GCLOUD_COMMANDS=( + "docker" + "run" + "--entrypoint=gcloud" + "--volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR}" + "gcr.io/google.com/cloudsdktool/cloud-sdk" + ) + if [[ "${TRAMPOLINE_CI:-}" == "kokoro" ]]; then + SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" + else + echo "Authentication for this CI system is not implemented yet." + exit 2 + # TODO: Determine appropriate SECRET_LOCATION and the GCLOUD_COMMANDS. + fi +else + # For local run, use /dev/shm or temporary directory for + # KOKORO_GFILE_DIR. + if [[ -d "/dev/shm" ]]; then + export KOKORO_GFILE_DIR=/dev/shm + else + export KOKORO_GFILE_DIR=$(mktemp -d -t ci-XXXXXXXX) + fi + SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" + GCLOUD_COMMANDS=("gcloud") +fi + +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} + +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + "${GCLOUD_COMMANDS[@]}" \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret $key > \ + "$SECRET_LOCATION/$key" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + exit 2 + fi +done diff --git a/.kokoro/presubmit/node10/common.cfg b/.kokoro/presubmit/node10/common.cfg index 9256c217..de7f471d 100644 --- a/.kokoro/presubmit/node10/common.cfg +++ b/.kokoro/presubmit/node10/common.cfg @@ -7,11 +7,21 @@ action { } } +# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} + # Download trampoline resources. 
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/.kokoro/presubmit/node10/system-test-grpcjs.cfg b/.kokoro/presubmit/node10/system-test-grpcjs.cfg deleted file mode 100644 index 623103d0..00000000 --- a/.kokoro/presubmit/node10/system-test-grpcjs.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery/.kokoro/system-test.sh" -} - -env_vars: { - key: "GOOGLE_CLOUD_USE_GRPC_JS" - value: "1" -} diff --git a/.kokoro/presubmit/node11/common.cfg b/.kokoro/presubmit/node11/common.cfg deleted file mode 100644 index 8f2c148f..00000000 --- a/.kokoro/presubmit/node11/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:11-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery/.kokoro/test.sh" -} diff --git a/.kokoro/presubmit/node11/test.cfg b/.kokoro/presubmit/node11/test.cfg deleted file mode 100644 index e69de29b..00000000 diff --git a/.kokoro/continuous/node6/common.cfg b/.kokoro/presubmit/node12/common.cfg similarity index 80% rename from .kokoro/continuous/node6/common.cfg rename to .kokoro/presubmit/node12/common.cfg index 03d2426b..5e26eaed 100644 --- a/.kokoro/continuous/node6/common.cfg +++ b/.kokoro/presubmit/node12/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:6-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/.kokoro/presubmit/node10/samples-test.cfg b/.kokoro/presubmit/node12/samples-test.cfg similarity index 100% rename from .kokoro/presubmit/node10/samples-test.cfg rename to .kokoro/presubmit/node12/samples-test.cfg diff --git a/.kokoro/presubmit/node10/system-test.cfg b/.kokoro/presubmit/node12/system-test.cfg similarity index 100% rename from .kokoro/presubmit/node10/system-test.cfg rename to .kokoro/presubmit/node12/system-test.cfg diff --git a/.kokoro/continuous/node6/test.cfg b/.kokoro/presubmit/node12/test.cfg similarity index 100% rename from .kokoro/continuous/node6/test.cfg rename to .kokoro/presubmit/node12/test.cfg diff --git a/.kokoro/presubmit/node6/common.cfg b/.kokoro/presubmit/node6/common.cfg deleted file mode 100644 index 03d2426b..00000000 --- a/.kokoro/presubmit/node6/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:6-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery/.kokoro/test.sh" -} diff --git a/.kokoro/presubmit/node6/test.cfg b/.kokoro/presubmit/node6/test.cfg deleted file mode 100644 index e69de29b..00000000 diff --git a/.kokoro/presubmit/node8/test.cfg b/.kokoro/presubmit/node8/test.cfg index 468b8c71..e69de29b 100644 --- a/.kokoro/presubmit/node8/test.cfg +++ b/.kokoro/presubmit/node8/test.cfg @@ -1,9 +0,0 @@ -# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} diff --git a/.kokoro/publish.sh b/.kokoro/publish.sh index f2a8adc0..4db6bf1c 100755 --- a/.kokoro/publish.sh +++ b/.kokoro/publish.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Start the releasetool reporter python3 -m pip install gcp-releasetool @@ -24,8 +24,8 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / cd $(dirname $0)/.. 
-NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google_cloud_npm_token) -echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > ~/.npmrc +NPM_TOKEN=$(cat $KOKORO_GFILE_DIR/secret_manager/npm_publish_token) +echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc npm install -npm publish --access=public +npm publish --access=public --registry=https://wombat-dressing-room.appspot.com diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg new file mode 100644 index 00000000..3ba2eb09 --- /dev/null +++ b/.kokoro/release/common.cfg @@ -0,0 +1,8 @@ +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} diff --git a/.kokoro/release/docs-devsite.cfg b/.kokoro/release/docs-devsite.cfg new file mode 100644 index 00000000..5eb4cf6a --- /dev/null +++ b/.kokoro/release/docs-devsite.cfg @@ -0,0 +1,26 @@ +# service account used to publish up-to-date docs. +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# doc publications use a Python image. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery/.kokoro/release/docs-devsite.sh" +} diff --git a/.kokoro/release/docs-devsite.sh b/.kokoro/release/docs-devsite.sh new file mode 100755 index 00000000..2198e67f --- /dev/null +++ b/.kokoro/release/docs-devsite.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +if [[ -z "$CREDENTIALS" ]]; then + # if CREDENTIALS are explicitly set, assume we're testing locally + # and don't set NPM_CONFIG_PREFIX. + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" + cd $(dirname $0)/../.. +fi + +npm install +npm install --no-save @google-cloud/cloud-rad@^0.2.5 +npx @google-cloud/cloud-rad \ No newline at end of file diff --git a/.kokoro/release/docs.cfg b/.kokoro/release/docs.cfg new file mode 100644 index 00000000..524c90d1 --- /dev/null +++ b/.kokoro/release/docs.cfg @@ -0,0 +1,26 @@ +# service account used to publish up-to-date docs. +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# doc publications use a Python image. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery/.kokoro/release/docs.sh" +} diff --git a/.kokoro/release/docs.sh b/.kokoro/release/docs.sh new file mode 100755 index 00000000..4c866c86 --- /dev/null +++ b/.kokoro/release/docs.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# build jsdocs (Python is installed on the Node 10 docker image). +if [[ -z "$CREDENTIALS" ]]; then + # if CREDENTIALS are explicitly set, assume we're testing locally + # and don't set NPM_CONFIG_PREFIX. + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" + cd $(dirname $0)/../.. +fi +npm install +npm run docs + +# create docs.metadata, based on package.json and .repo-metadata.json. +npm i json@9.0.6 -g +python3 -m pip install --user gcp-docuploader +python3 -m docuploader create-metadata \ + --name=$(cat .repo-metadata.json | json name) \ + --version=$(cat package.json | json version) \ + --language=$(cat .repo-metadata.json | json language) \ + --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ + --product-page=$(cat .repo-metadata.json | json product_documentation) \ + --github-repository=$(cat .repo-metadata.json | json repo) \ + --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) +cp docs.metadata ./docs/docs.metadata + +# deploy the docs. +if [[ -z "$CREDENTIALS" ]]; then + CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account +fi +if [[ -z "$BUCKET" ]]; then + BUCKET=docs-staging +fi +python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET diff --git a/.kokoro/release/publish.cfg b/.kokoro/release/publish.cfg index 130fd86e..861db826 100644 --- a/.kokoro/release/publish.cfg +++ b/.kokoro/release/publish.cfg @@ -1,23 +1,3 @@ -# Get npm token from Keystore -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_npm_token" - backend_type: FASTCONFIGPUSH - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - before_action { fetch_keystore { keystore_resource { @@ -27,36 +7,21 @@ before_action { } } -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } -} - -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "npm_publish_token,releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } # Download trampoline resources. 
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" } env_vars: { diff --git a/.kokoro/samples-test.sh b/.kokoro/samples-test.sh index 5a81ec01..950f8483 100755 --- a/.kokoro/samples-test.sh +++ b/.kokoro/samples-test.sh @@ -14,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -xeo pipefail +set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json @@ -31,12 +31,38 @@ if [ -f .kokoro/pre-samples-test.sh ]; then set -x fi -npm install +if [ -f samples/package.json ]; then + npm install -# Install and link samples -cd samples/ -npm link ../ -npm install -cd .. + # Install and link samples + cd samples/ + npm link ../ + npm install + cd .. + # If tests are running against master, configure flakybot + # to open issues on failures: + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP + fi -npm run samples-test + npm run samples-test +fi + +# codecov combines coverage across integration and unit tests. Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=10 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/.kokoro/system-test.sh b/.kokoro/system-test.sh index fd8f0b63..319d1e0e 100755 --- a/.kokoro/system-test.sh +++ b/.kokoro/system-test.sh @@ -14,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -xeo pipefail +set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json @@ -33,4 +33,29 @@ fi npm install +# If tests are running against master, configure flakybot +# to open issues on failures: +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + npm run system-test + +# codecov combines coverage across integration and unit tests. 
Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=10 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/.kokoro/test.bat b/.kokoro/test.bat index 76732075..ae59e59b 100644 --- a/.kokoro/test.bat +++ b/.kokoro/test.bat @@ -17,7 +17,13 @@ cd /d %~dp0 cd .. -call npm install -g npm@latest || goto :error +@rem npm path is not currently set in our image, we should fix this next time +@rem we upgrade Node.js in the image: +SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm + +call nvm use v12.14.1 +call which node + call npm install || goto :error call npm run test || goto :error diff --git a/.kokoro/test.sh b/.kokoro/test.sh index 4d6c3f83..b5646aeb 100755 --- a/.kokoro/test.sh +++ b/.kokoro/test.sh @@ -14,14 +14,38 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -xeo pipefail +set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. npm install +# If tests are running against master, configure flakybot +# to open issues on failures: +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi +# Unit tests exercise the entire API surface, which may include +# deprecation warnings: +export MOCHA_THROW_DEPRECATION=false npm test -./node_modules/nyc/bin/nyc.js report -bash $KOKORO_GFILE_DIR/codecov.sh +# codecov combines coverage across integration and unit tests. Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=10 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index 87ffd2ca..f693a1ce 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -13,7 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -xeo pipefail +# This file is not used any more, but we keep this file for making it +# easy to roll back. +# TODO: Remove this file from the template. + +set -eo pipefail # Always run the cleanup script, regardless of the success of bouncing into # the container. @@ -24,4 +28,5 @@ function cleanup() { } trap cleanup EXIT +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh new file mode 100755 index 00000000..4d031121 --- /dev/null +++ b/.kokoro/trampoline_v2.sh @@ -0,0 +1,490 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# If you want to make a change to this file, consider doing so at: +# https://github.com/googlecloudplatform/docker-ci-helper +# +# This script is for running CI builds. For Kokoro builds, we +# set this script to `build_file` field in the Kokoro configuration. + +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. +# +# Here is an example for running this script. +# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \ +# TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ +# .kokoro/trampoline_v2.sh + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.7" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. 
+function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" + # Contains path to build artifacts being executed. + "KOKORO_BUILD_ARTIFACTS_SUBDIR" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. 
+ "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For flakybot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. 
"github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}" | grep "${TRAMPOLINE_IMAGE%:*}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. 
+ "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.mocharc.js b/.mocharc.js new file mode 100644 index 00000000..0b600509 --- /dev/null +++ b/.mocharc.js @@ -0,0 +1,29 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000, + "recursive": true +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/.nycrc b/.nycrc index 88b001cb..b18d5472 100644 --- a/.nycrc +++ b/.nycrc @@ -1,28 +1,24 @@ { "report-dir": "./.coverage", - "reporter": "lcov", + "reporter": ["text", "lcov"], "exclude": [ - "src/*{/*,/**/*}.js", - "src/*/v*/*.js", - "test/**/*.js", - "build/test" + "**/*-test", + "**/.coverage", + "**/apis", + "**/benchmark", + "**/conformance", + "**/docs", + "**/samples", + "**/scripts", + "**/protos", + "**/test", + "**/*.d.ts", + ".jsdoc.js", + "**/.jsdoc.js", + "karma.conf.js", + "webpack-tests.config.js", + "webpack.config.js" ], - "watermarks": { - "branches": [ - 95, - 100 - ], - "functions": [ - 95, - 100 - ], - "lines": [ - 95, - 100 - ], - "statements": [ - 95, - 100 - ] - } + "exclude-after-remap": false, + "all": true } diff --git a/.prettierignore b/.prettierignore index f6fac98b..9340ad9b 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,3 +1,6 @@ -node_modules/* -samples/node_modules/* -src/**/doc/* +**/node_modules +**/coverage +test/fixtures +build/ +docs/ +protos/ diff --git a/.prettierrc b/.prettierrc deleted file mode 100644 index df6eac07..00000000 --- a/.prettierrc +++ /dev/null @@ -1,8 +0,0 @@ ---- -bracketSpacing: false -printWidth: 80 -semi: true -singleQuote: true -tabWidth: 2 -trailingComma: es5 -useTabs: false diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 00000000..d1b95106 --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,17 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/.repo-metadata.json b/.repo-metadata.json new file mode 100644 index 00000000..c1265409 --- /dev/null +++ b/.repo-metadata.json @@ -0,0 +1,14 @@ +{ + "name": "bigquery", + "name_pretty": "Google BigQuery", + "product_documentation": "https://cloud.google.com/bigquery", + "client_documentation": "https://googleapis.dev/nodejs/bigquery/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "release_level": "ga", + "language": "nodejs", + "repo": "googleapis/nodejs-bigquery", + "distribution_name": "@google-cloud/bigquery", + "api_id": "bigquery.googleapis.com", + "requires_billing": false, + "codeowner_team": "@googleapis/api-bigquery" +} diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 00000000..5fc22531 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,52 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "AUTORELEASE_PR" + "VERSION" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi + +# Secret Manager secrets. +source ${PROJECT_ROOT}/.kokoro/populate-secrets.sh diff --git a/CHANGELOG.md b/CHANGELOG.md index bc838d32..c1523fa9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,366 @@ [1]: https://www.npmjs.com/package/@google-cloud/bigquery?activeTab=versions +## [5.7.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.6.0...v5.7.0) (2021-07-21) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#958](https://www.github.com/googleapis/nodejs-bigquery/issues/958)) ([9eab78d](https://www.github.com/googleapis/nodejs-bigquery/commit/9eab78de56d087f3dc756ab2c0974737a80ad43f)) + + +### Bug Fixes + +* **deps:** update dependency yargs to v17 ([#954](https://www.github.com/googleapis/nodejs-bigquery/issues/954)) ([28cf08d](https://www.github.com/googleapis/nodejs-bigquery/commit/28cf08d74184b388cd8d18a9622630939c5f99cf)) +* extend definition of Query.types for simple named parameters ([#906](https://www.github.com/googleapis/nodejs-bigquery/issues/906)) ([#907](https://www.github.com/googleapis/nodejs-bigquery/issues/907)) ([44e1ac7](https://www.github.com/googleapis/nodejs-bigquery/commit/44e1ac7cf8604d79508316d70a3a98e2953d59f0)) +* handle null query parameter value ([#920](https://www.github.com/googleapis/nodejs-bigquery/issues/920)) ([3bf900a](https://www.github.com/googleapis/nodejs-bigquery/commit/3bf900a54a92c0422fa8f3c48480dc430a7d134d)) +* promise never returned on table.insert ([#953](https://www.github.com/googleapis/nodejs-bigquery/issues/953)) ([a138347](https://www.github.com/googleapis/nodejs-bigquery/commit/a138347855f74d4e5c889dc42b00992c4a3808a6)) + +## [5.6.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.5.0...v5.6.0) (2021-04-28) + + +### Features + +* Adds support for BIGNUMERIC type ([#904](https://www.github.com/googleapis/nodejs-bigquery/issues/904)) ([ef5552a](https://www.github.com/googleapis/nodejs-bigquery/commit/ef5552a5230240650fadd5bca8405a69b561a712)) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v7 ([#928](https://www.github.com/googleapis/nodejs-bigquery/issues/928)) ([2ce28c7](https://www.github.com/googleapis/nodejs-bigquery/commit/2ce28c7beec18d80a744e5dafaa0b8288041c35f)) +* update returned Job with API-determined location in getMetadata 
([#900](https://www.github.com/googleapis/nodejs-bigquery/issues/900)) ([8c31358](https://www.github.com/googleapis/nodejs-bigquery/commit/8c313582595ba7819f1cebf01625b24814c38174)) + +## [5.5.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.4.0...v5.5.0) (2020-11-10) + + +### Features + +* Add support for Table IAM policies ([#892](https://www.github.com/googleapis/nodejs-bigquery/issues/892)) ([005422a](https://www.github.com/googleapis/nodejs-bigquery/commit/005422a07a46edd0eaf3fba3035753b42a86dadb)) + + +### Bug Fixes + +* update returned Job with API-determined location ([#890](https://www.github.com/googleapis/nodejs-bigquery/issues/890)) ([3894140](https://www.github.com/googleapis/nodejs-bigquery/commit/38941409c63221bf704ee8580ab3b032802ddc4e)) + +## [5.4.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.3.0...v5.4.0) (2020-11-02) + + +### Features + +* optionally wrap INT64 in BigQueryInt or provide a custom INT64 value type cast options ([#873](https://www.github.com/googleapis/nodejs-bigquery/issues/873)) ([be7c6e6](https://www.github.com/googleapis/nodejs-bigquery/commit/be7c6e6411e351bfab4b557fb34162470bbfd7f4)) + + +### Bug Fixes + +* Detect Geography type during parameterized query ([#877](https://www.github.com/googleapis/nodejs-bigquery/issues/877)) ([bc0ca69](https://www.github.com/googleapis/nodejs-bigquery/commit/bc0ca695a5b2d9df15df9383f6a791be30e851ec)) +* do not retry jobs.insert when it flakes ([#864](https://www.github.com/googleapis/nodejs-bigquery/issues/864)) ([255491b](https://www.github.com/googleapis/nodejs-bigquery/commit/255491b958171907695b10aca7e536d58a52354c)) +* return error when custom getQueryResults() timeout has been exceeded ([#872](https://www.github.com/googleapis/nodejs-bigquery/issues/872)) ([96f939c](https://www.github.com/googleapis/nodejs-bigquery/commit/96f939cefe2f31a5252002bbfecd5f503b32f841)) +* **deps:** update dependency big.js to v6 ([#862](https://www.github.com/googleapis/nodejs-bigquery/issues/862)) ([a47afb5](https://www.github.com/googleapis/nodejs-bigquery/commit/a47afb5c97115d0159ad94615a7997db15d03d01)) + +## [5.3.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.2.0...v5.3.0) (2020-09-30) + + +### Features + +* **constructor:** add option to set baseUrl using environment variable ([#849](https://www.github.com/googleapis/nodejs-bigquery/issues/849)) ([8c54f20](https://www.github.com/googleapis/nodejs-bigquery/commit/8c54f20777a902a343035fcf09e63978d71135ad)) +* allow setting BigQuery Job labels in createQueryJob method ([#865](https://www.github.com/googleapis/nodejs-bigquery/issues/865)) ([be074e7](https://www.github.com/googleapis/nodejs-bigquery/commit/be074e72ae1907f0649fbc5e085e22a31c3a6393)) + + +### Bug Fixes + +* **deps:** update dependency yargs to v16 ([#854](https://www.github.com/googleapis/nodejs-bigquery/issues/854)) ([58dcf34](https://www.github.com/googleapis/nodejs-bigquery/commit/58dcf34d8d22b4b5c9e488935b75eeaf8c8fd69e)) +* **perf:** disable prettyPrint for slimmer API responses ([#860](https://www.github.com/googleapis/nodejs-bigquery/issues/860)) ([1e56383](https://www.github.com/googleapis/nodejs-bigquery/commit/1e56383da5e6d8ce1335a711b32fea1155bddada)) + +## [5.2.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.1.0...v5.2.0) (2020-08-13) + + +### Features + +* model extract ([#832](https://www.github.com/googleapis/nodejs-bigquery/issues/832)) 
([1541e98](https://www.github.com/googleapis/nodejs-bigquery/commit/1541e98076ee33da7d7e5f5a10d3ea45fc393736)) + +## [5.1.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.0.1...v5.1.0) (2020-07-27) + + +### Features + +* detect param type if not in provided types ([#813](https://www.github.com/googleapis/nodejs-bigquery/issues/813)) ([1e5a4cc](https://www.github.com/googleapis/nodejs-bigquery/commit/1e5a4cc0e7927dfe9690842e564982bfbef9310f)) + + +### Bug Fixes + +* add string type for query types ([#827](https://www.github.com/googleapis/nodejs-bigquery/issues/827)) ([acdecbd](https://www.github.com/googleapis/nodejs-bigquery/commit/acdecbd06f6a6ac1e5d1d8d0cd68afcb9a4d3ba7)) +* move gitattributes files to node templates ([#829](https://www.github.com/googleapis/nodejs-bigquery/issues/829)) ([f26e641](https://www.github.com/googleapis/nodejs-bigquery/commit/f26e64100e543cb520bcd7cd99913eca68e84af5)) + +### [5.0.1](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.0.0...v5.0.1) (2020-07-07) + + +### Bug Fixes + +* add tests for Routine ([#807](https://www.github.com/googleapis/nodejs-bigquery/issues/807)) ([c969f3d](https://www.github.com/googleapis/nodejs-bigquery/commit/c969f3d15d4e545b9efd92c4f8a9649216cbd927)) + +## [5.0.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.7.0...v5.0.0) (2020-06-19) + + +### ⚠ BREAKING CHANGES + +* don't return Stream from createLoadJob (#647) +* drop Node 8 from engines field (#662) + +### Features + +* drop Node 8 from engines field ([#662](https://www.github.com/googleapis/nodejs-bigquery/issues/662)) ([712b029](https://www.github.com/googleapis/nodejs-bigquery/commit/712b0294c6329545de70febb48762abd8b0567b9)) +* improved types ([40087fa](https://www.github.com/googleapis/nodejs-bigquery/commit/40087fa40f1e9a4180da7aaa43e2bb8a018bd632)) +* update types.d.ts ([#667](https://www.github.com/googleapis/nodejs-bigquery/issues/667)) ([a12b094](https://www.github.com/googleapis/nodejs-bigquery/commit/a12b094d2e6e48049203c9cd773fecb98713a3fa)), closes [#662](https://www.github.com/googleapis/nodejs-bigquery/issues/662) [#662](https://www.github.com/googleapis/nodejs-bigquery/issues/662) [#647](https://www.github.com/googleapis/nodejs-bigquery/issues/647) [#647](https://www.github.com/googleapis/nodejs-bigquery/issues/647) [#640](https://www.github.com/googleapis/nodejs-bigquery/issues/640) [#640](https://www.github.com/googleapis/nodejs-bigquery/issues/640) [#647](https://www.github.com/googleapis/nodejs-bigquery/issues/647) [#661](https://www.github.com/googleapis/nodejs-bigquery/issues/661) [#661](https://www.github.com/googleapis/nodejs-bigquery/issues/661) [#658](https://www.github.com/googleapis/nodejs-bigquery/issues/658) [#658](https://www.github.com/googleapis/nodejs-bigquery/issues/658) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#657](https://www.github.com/googleapis/nodejs-bigquery/issues/657) [#657](https://www.github.com/googleapis/nodejs-bigquery/issues/657) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) 
[#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#660](https://www.github.com/googleapis/nodejs-bigquery/issues/660) [#660](https://www.github.com/googleapis/nodejs-bigquery/issues/660) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) [#665](https://www.github.com/googleapis/nodejs-bigquery/issues/665) [#665](https://www.github.com/googleapis/nodejs-bigquery/issues/665) [#454](https://www.github.com/googleapis/nodejs-bigquery/issues/454) [#454](https://www.github.com/googleapis/nodejs-bigquery/issues/454) [#456](https://www.github.com/googleapis/nodejs-bigquery/issues/456) [#456](https://www.github.com/googleapis/nodejs-bigquery/issues/456) [#463](https://www.github.com/googleapis/nodejs-bigquery/issues/463) [#463](https://www.github.com/googleapis/nodejs-bigquery/issues/463) +* **userAgent:** allow for optional user agent to be provided ([#671](https://www.github.com/googleapis/nodejs-bigquery/issues/671)) ([25aeca8](https://www.github.com/googleapis/nodejs-bigquery/commit/25aeca8f6c136d03d6029bda54e7cdab98af80ca)) + + +### Bug Fixes + +* add types for hasDrift and seasonalPeriods ([#680](https://www.github.com/googleapis/nodejs-bigquery/issues/680)) ([d6c9566](https://www.github.com/googleapis/nodejs-bigquery/commit/d6c95667577df2d32bff6f9d07117d011379ecd2)) +* **deps:** update dependency @google-cloud/paginator to v3 ([#658](https://www.github.com/googleapis/nodejs-bigquery/issues/658)) ([a09c493](https://www.github.com/googleapis/nodejs-bigquery/commit/a09c493f1f94e4a0272c17cb62009c92945c20d0)) +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-bigquery/issues/468)) ([#669](https://www.github.com/googleapis/nodejs-bigquery/issues/669)) ([d3ed602](https://www.github.com/googleapis/nodejs-bigquery/commit/d3ed602e47ba005ca4c9d2f382867d19336f239d)) +* drop dependency on string-format-obj ([#698](https://www.github.com/googleapis/nodejs-bigquery/issues/698)) ([cf8f58f](https://www.github.com/googleapis/nodejs-bigquery/commit/cf8f58f851a8e32a4857f35c05a081cd031be124)) +* load job to a different project ID ([#748](https://www.github.com/googleapis/nodejs-bigquery/issues/748)) ([bfb74ad](https://www.github.com/googleapis/nodejs-bigquery/commit/bfb74add1850925837fa1737fded8642c80f0356)) +* **docs:** fix link for job configuration load ([#678](https://www.github.com/googleapis/nodejs-bigquery/issues/678)) 
([ea3d7af](https://www.github.com/googleapis/nodejs-bigquery/commit/ea3d7afe18f8f22c6541043c92c26625ae9e0e85)) +* selectedFields on getRows not working correctly ([#712](https://www.github.com/googleapis/nodejs-bigquery/issues/712)) ([13b7e39](https://www.github.com/googleapis/nodejs-bigquery/commit/13b7e391cb3cfd87caec094f058143842cb39306)) +* **deps:** update dependency @google-cloud/promisify to v2 ([#657](https://www.github.com/googleapis/nodejs-bigquery/issues/657)) ([5d8112c](https://www.github.com/googleapis/nodejs-bigquery/commit/5d8112c2cd3994d1d32102d63a7a90fb9478223c)) +* **deps:** update dependency @google-cloud/storage to v5 ([#700](https://www.github.com/googleapis/nodejs-bigquery/issues/700)) ([a2e34ef](https://www.github.com/googleapis/nodejs-bigquery/commit/a2e34ef32a79c0dccaa11954ca2fa3f90795c63a)) +* **deps:** update dependency google-auth-library to v6 ([#660](https://www.github.com/googleapis/nodejs-bigquery/issues/660)) ([3ea642e](https://www.github.com/googleapis/nodejs-bigquery/commit/3ea642ec9f1c471bff0d5d095fcc3e1b3813e52a)) +* **docs:** configuration.copy link ([#709](https://www.github.com/googleapis/nodejs-bigquery/issues/709)) ([4a81b1e](https://www.github.com/googleapis/nodejs-bigquery/commit/4a81b1e25c9b8f09eca28142bd54f6ca42b1f866)) +* **docs:** correct createTablePartitioned sample argument ([#701](https://www.github.com/googleapis/nodejs-bigquery/issues/701)) ([9a7520e](https://www.github.com/googleapis/nodejs-bigquery/commit/9a7520e62ebe7f561190de0a3c1080bbc07567ba)) +* **table:** add retries for insert partial failures ([#589](https://www.github.com/googleapis/nodejs-bigquery/issues/589)) ([b8639c2](https://www.github.com/googleapis/nodejs-bigquery/commit/b8639c27009aaa4eb03bbd9ebf0fa1463e2bcd2b)), closes [#655](https://www.github.com/googleapis/nodejs-bigquery/issues/655) +* **types:** drop changes for drift and seasonal ([#681](https://www.github.com/googleapis/nodejs-bigquery/issues/681)) ([679d990](https://www.github.com/googleapis/nodejs-bigquery/commit/679d990f391433fbef180a4bbba2e32442e358da)) + + +### Code Refactoring + +* don't return Stream from createLoadJob ([#647](https://www.github.com/googleapis/nodejs-bigquery/issues/647)) ([8e26fb5](https://www.github.com/googleapis/nodejs-bigquery/commit/8e26fb561a9595e0f05e0506cebb71aa1eaba432)), closes [#640](https://www.github.com/googleapis/nodejs-bigquery/issues/640) [#640](https://www.github.com/googleapis/nodejs-bigquery/issues/640) + +## [4.7.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.6.1...v4.7.0) (2020-01-30) + + +### Features + +* add support for scripting/routines ([#580](https://www.github.com/googleapis/nodejs-bigquery/issues/580)) ([63d7e24](https://www.github.com/googleapis/nodejs-bigquery/commit/63d7e24bd9347f7b5202127afc1e92be34819a77)) +* **params:** adds optional param types ([#599](https://www.github.com/googleapis/nodejs-bigquery/issues/599)) ([008946a](https://www.github.com/googleapis/nodejs-bigquery/commit/008946a05b8d1d54add31a25cc52aba2a61448a8)) + +### [4.6.1](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.6.0...v4.6.1) (2020-01-13) + + +### Bug Fixes + +* don't modify the constructor options ([#607](https://www.github.com/googleapis/nodejs-bigquery/issues/607)) ([7df0799](https://www.github.com/googleapis/nodejs-bigquery/commit/7df0799e09e2a3a44f9ac4a04d157b7c85816fbe)) + +## [4.6.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.5.0...v4.6.0) (2020-01-05) + + +### Features + +* types to support slot ms and read masks 
([#592](https://www.github.com/googleapis/nodejs-bigquery/issues/592)) ([84d1c82](https://www.github.com/googleapis/nodejs-bigquery/commit/84d1c82981a2f3444836dde5d8fd00a23ee1cf94)) + +## [4.5.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.4.0...v4.5.0) (2019-12-05) + + +### Features + +* **table:** allow opting out of default insert id ([#582](https://www.github.com/googleapis/nodejs-bigquery/issues/582)) ([6bf2dbd](https://www.github.com/googleapis/nodejs-bigquery/commit/6bf2dbd1ec09689338ee21b1d8666a4e8b2a7367)) +* adds policyTags parameter removes IGetParams Interface ([#576](https://www.github.com/googleapis/nodejs-bigquery/issues/576)) ([8cf8f1d](https://www.github.com/googleapis/nodejs-bigquery/commit/8cf8f1d15cd53406ac911fef512f69132d823873)) + + +### Bug Fixes + +* **deps:** TypeScript 3.7.0 causes breaking change in typings ([#586](https://www.github.com/googleapis/nodejs-bigquery/issues/586)) ([04f8cba](https://www.github.com/googleapis/nodejs-bigquery/commit/04f8cba7c86675fd7e12bb5ac4235f56745c033f)) +* **deps:** update dependency yargs to v15 ([#579](https://www.github.com/googleapis/nodejs-bigquery/issues/579)) ([92119e3](https://www.github.com/googleapis/nodejs-bigquery/commit/92119e3b23874263d9529283194a149b358b7c9f)) +* **docs:** snippets are now replaced in jsdoc comments ([#573](https://www.github.com/googleapis/nodejs-bigquery/issues/573)) ([a0d3538](https://www.github.com/googleapis/nodejs-bigquery/commit/a0d3538ad83b356918cabbd2bbfaf405e0a8272d)) + +## [4.4.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.3.0...v4.4.0) (2019-11-08) + + +### Features + +* **table:** typescript support for range partitioning ([#559](https://www.github.com/googleapis/nodejs-bigquery/issues/559)) ([a77c28a](https://www.github.com/googleapis/nodejs-bigquery/commit/a77c28a3e8b84760d67c4381008424103dcd1db7)) +* typescript support for data split result ([#570](https://www.github.com/googleapis/nodejs-bigquery/issues/570)) ([2236545](https://www.github.com/googleapis/nodejs-bigquery/commit/223654555ed9113683781883c65ffa7ee2f1ea5b)) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/storage to v4 ([#561](https://www.github.com/googleapis/nodejs-bigquery/issues/561)) ([0ec07f9](https://www.github.com/googleapis/nodejs-bigquery/commit/0ec07f994e0e9567025d1c96ad65f9a057a65344)) + +## [4.3.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.2.1...v4.3.0) (2019-10-09) + + +### Features + +* **TypeScript:** introduce IArimaResult interface ([4cd3a71](https://www.github.com/googleapis/nodejs-bigquery/commit/4cd3a71)) + +### [4.2.1](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.2.0...v4.2.1) (2019-09-16) + + +### Bug Fixes + +* **deps:** update dependency discovery-tsd to ^0.2.0 ([#540](https://www.github.com/googleapis/nodejs-bigquery/issues/540)) ([651e870](https://www.github.com/googleapis/nodejs-bigquery/commit/651e870)) + +## [4.2.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.8...v4.2.0) (2019-09-07) + + +### Bug Fixes + +* **deps:** update dependency yargs to v14 ([#520](https://www.github.com/googleapis/nodejs-bigquery/issues/520)) ([9dd59a6](https://www.github.com/googleapis/nodejs-bigquery/commit/9dd59a6)) +* **types:** update to the latest discovery types ([#518](https://www.github.com/googleapis/nodejs-bigquery/issues/518)) ([dccf2cf](https://www.github.com/googleapis/nodejs-bigquery/commit/dccf2cf)) +* update root url to `bigquery.googleapis.com` 
([#531](https://www.github.com/googleapis/nodejs-bigquery/issues/531)) ([277940f](https://www.github.com/googleapis/nodejs-bigquery/commit/277940f)) + + +### Features + +* **typescript:** generate latest request/response types ([#528](https://www.github.com/googleapis/nodejs-bigquery/issues/528)) ([f8d2f4d](https://www.github.com/googleapis/nodejs-bigquery/commit/f8d2f4d)) + +### [4.1.8](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.7...v4.1.8) (2019-08-02) + + +### Bug Fixes + +* allow calls with no request, add JSON proto ([885a98a](https://www.github.com/googleapis/nodejs-bigquery/commit/885a98a)) + +### [4.1.7](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.6...v4.1.7) (2019-08-01) + + +### Bug Fixes + +* **docs:** duplicate readme sample names ([#512](https://www.github.com/googleapis/nodejs-bigquery/issues/512)) ([56040f5](https://www.github.com/googleapis/nodejs-bigquery/commit/56040f5)) +* **docs:** fix formatting of the docs ([#513](https://www.github.com/googleapis/nodejs-bigquery/issues/513)) ([d823014](https://www.github.com/googleapis/nodejs-bigquery/commit/d823014)) + +### [4.1.6](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.5...v4.1.6) (2019-07-29) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/storage to v3 ([#508](https://www.github.com/googleapis/nodejs-bigquery/issues/508)) ([bdca2ea](https://www.github.com/googleapis/nodejs-bigquery/commit/bdca2ea)) + +### [4.1.5](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.4...v4.1.5) (2019-07-17) + + +### Performance Improvements + +* pull in paginator refactor ([#499](https://www.github.com/googleapis/nodejs-bigquery/issues/499)) ([8daafcc](https://www.github.com/googleapis/nodejs-bigquery/commit/8daafcc)) + +### [4.1.4](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.3...v4.1.4) (2019-07-02) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#486](https://www.github.com/googleapis/nodejs-bigquery/issues/486)) ([a76cc5b](https://www.github.com/googleapis/nodejs-bigquery/commit/a76cc5b)) + +### [4.1.3](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.2...v4.1.3) (2019-06-17) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#479](https://www.github.com/googleapis/nodejs-bigquery/issues/479)) ([7db57d2](https://www.github.com/googleapis/nodejs-bigquery/commit/7db57d2)) + +### [4.1.2](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.1...v4.1.2) (2019-06-11) + + +### Bug Fixes + +* link to new googleapis.dev docs ([#477](https://www.github.com/googleapis/nodejs-bigquery/issues/477)) ([9dfcda0](https://www.github.com/googleapis/nodejs-bigquery/commit/9dfcda0)) + +### [4.1.1](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.1.0...v4.1.1) (2019-05-30) + + +### Bug Fixes + +* **job:** remove job instance from request params ([#465](https://www.github.com/googleapis/nodejs-bigquery/issues/465)) ([27f080d](https://www.github.com/googleapis/nodejs-bigquery/commit/27f080d)) +* correct name in .repo-metadata.json ([#467](https://www.github.com/googleapis/nodejs-bigquery/issues/467)) ([6add722](https://www.github.com/googleapis/nodejs-bigquery/commit/6add722)) + +## [4.1.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v4.0.0...v4.1.0) (2019-05-29) + + +### Features + +* **model:** dataset model support ([#449](https://www.github.com/googleapis/nodejs-bigquery/issues/449)) 
([3ad884f](https://www.github.com/googleapis/nodejs-bigquery/commit/3ad884f)) +* accept apiEndpoint override ([#455](https://www.github.com/googleapis/nodejs-bigquery/issues/455)) ([1eda8ff](https://www.github.com/googleapis/nodejs-bigquery/commit/1eda8ff)) + +## [4.0.0](https://www.github.com/googleapis/nodejs-bigquery/compare/v3.0.0...v4.0.0) (2019-05-20) + + +### ⚠ BREAKING CHANGES + +* **deps:** this will ship async/await with the generated code. +* upgrade engines field to >=8.10.0 (#424) +* This removes the `autoCreate` option which may result in a breaking change for TypeScript users. + +### Bug Fixes + +* **deps:** update dependency @google-cloud/common to ^0.32.0 ([8e28b62](https://www.github.com/googleapis/nodejs-bigquery/commit/8e28b62)), closes [#8203](https://www.github.com/googleapis/nodejs-bigquery/issues/8203) +* **deps:** update dependency @google-cloud/common to v1 ([#434](https://www.github.com/googleapis/nodejs-bigquery/issues/434)) ([0e4aeef](https://www.github.com/googleapis/nodejs-bigquery/commit/0e4aeef)) +* **deps:** update dependency @google-cloud/paginator to v1 ([#428](https://www.github.com/googleapis/nodejs-bigquery/issues/428)) ([5d925af](https://www.github.com/googleapis/nodejs-bigquery/commit/5d925af)) +* **deps:** update dependency @google-cloud/promisify to v1 ([#427](https://www.github.com/googleapis/nodejs-bigquery/issues/427)) ([fdeb862](https://www.github.com/googleapis/nodejs-bigquery/commit/fdeb862)) +* **deps:** update dependency arrify to v2 ([de0f687](https://www.github.com/googleapis/nodejs-bigquery/commit/de0f687)) +* **table:** allow for TableSchema to be used ([#438](https://www.github.com/googleapis/nodejs-bigquery/issues/438)) ([7995be0](https://www.github.com/googleapis/nodejs-bigquery/commit/7995be0)) +* **types:** correct interface ([#407](https://www.github.com/googleapis/nodejs-bigquery/issues/407)) ([da5ed01](https://www.github.com/googleapis/nodejs-bigquery/commit/da5ed01)) +* correctly encode nested struct/array params ([#439](https://www.github.com/googleapis/nodejs-bigquery/issues/439)) ([d7006bd](https://www.github.com/googleapis/nodejs-bigquery/commit/d7006bd)) +* remove teeny-request as a direct dependency ([#412](https://www.github.com/googleapis/nodejs-bigquery/issues/412)) ([c6de54a](https://www.github.com/googleapis/nodejs-bigquery/commit/c6de54a)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#424](https://www.github.com/googleapis/nodejs-bigquery/issues/424)) ([cea017e](https://www.github.com/googleapis/nodejs-bigquery/commit/cea017e)) + + +### Code Refactoring + +* drop autoCreate in table.insert in favor of schema ([#421](https://www.github.com/googleapis/nodejs-bigquery/issues/421)) ([b59cd7f](https://www.github.com/googleapis/nodejs-bigquery/commit/b59cd7f)) + + +### Miscellaneous Chores + +* **deps:** update dependency gts to v1 ([#419](https://www.github.com/googleapis/nodejs-bigquery/issues/419)) ([7b0e76a](https://www.github.com/googleapis/nodejs-bigquery/commit/7b0e76a)) + +## v3.0.0 + +04-02-2019 10:02 PDT + + +### Implementation Changes + +- fix(job): check for `errorResult` when polling jobs ([#387](https://github.com/googleapis/nodejs-bigquery/pull/387)) + + +**BREAKING CHANGE** Previously when polling a BigQuery Job the Node.js client would check for the presence of the `errors` field when trying to determine if the job succeeded. We have since changed this logic to instead check for the `errorResult` field.
This is significant because the `errors` array may now be present for passing jobs, however these errors should serve more as warnings. If your application logic depended on this functionality you'll need to manually check for `errors` now. + +```js +await job.promise(); + +if (job.metadata.status.errors) { + // optionally handle warnings +} +``` + +- fix(ts): provide complete and correct types ([#385](https://github.com/googleapis/nodejs-bigquery/pull/385)) + +**BREAKING CHANGE** A number of the BigQuery TypeScript types were incomplete, this change provides more complete types for the entire client. + +### New Features +- feat(geo): add support for geography ([#397](https://github.com/googleapis/nodejs-bigquery/pull/397)) + +### Bug Fixes +- fix: correctly encode nested custom date/time parameters ([#393](https://github.com/googleapis/nodejs-bigquery/pull/393)) + +### Dependencies +- chore(deps): update dependency tmp to v0.1.0 ([#398](https://github.com/googleapis/nodejs-bigquery/pull/398)) +- chore(deps): update dependency @types/tmp to v0.1.0 +- chore(deps): update dependency typescript to ~3.4.0 + +### Documentation +- docs(samples): adds queryParamsNamed and queryParamsPositional ([#381](https://github.com/googleapis/nodejs-bigquery/pull/381)) +- refactor(samples): split query and table samples into separate files ([#384](https://github.com/googleapis/nodejs-bigquery/pull/384)) +- refactor(samples): fix loadJSONFromGCSTruncate wrong function ([#386](https://github.com/googleapis/nodejs-bigquery/pull/386)) +- refactor(samples): add main() function wrappers to samples + +### Internal / Testing Changes +- build: use per-repo npm publish token ([#382](https://github.com/googleapis/nodejs-bigquery/pull/382)) +- chore: publish to npm using wombat ([#390](https://github.com/googleapis/nodejs-bigquery/pull/390)) +- fix(tests): update TIMESTAMP param tests ([#394](https://github.com/googleapis/nodejs-bigquery/pull/394)) + ## v2.1.0 03-12-2019 15:30 PDT @@ -279,4 +639,3 @@ BigQuery ORC: - chore: lock files ([#109](https://github.com/googleapis/nodejs-bigquery/pull/109)) - chore: timeout for system test ([#107](https://github.com/googleapis/nodejs-bigquery/pull/107)) - chore: lock files maintenance ([#106](https://github.com/googleapis/nodejs-bigquery/pull/106)) - diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 46b2a08e..2add2547 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,43 +1,94 @@ -# Contributor Code of Conduct + +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. 
However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 78aaa61b..a6bf63b2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -34,8 +34,17 @@ accept your pull requests. 1. Ensure that your code adheres to the existing style in the code to which you are contributing. 1. Ensure that your code has an appropriate set of tests which all pass. +1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. 1. Submit a pull request. +### Before you begin + +1. [Select or create a Cloud Platform project][projects]. +1. [Enable the Google BigQuery API][enable_api]. +1. [Set up authentication with a service account][auth] so you can access the + API from your local workstation. + + ## Running the tests 1. [Prepare your environment for Node.js setup][setup]. @@ -46,10 +55,21 @@ accept your pull requests. 1. Run the tests: + # Run unit tests. npm test + # Run sample integration tests. + npm run samples-test + + # Run all system tests. + npm run system-test + 1. Lint (and maybe fix) any changes: npm run fix [setup]: https://cloud.google.com/nodejs/docs/setup +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquery.googleapis.com +[auth]: https://cloud.google.com/docs/authentication/getting-started \ No newline at end of file diff --git a/README.md b/README.md index 1f792f92..770ca21c 100644 --- a/README.md +++ b/README.md @@ -1,77 +1,220 @@ [//]: # "This README.md file is auto-generated, all changes to this file will be lost." 
-[//]: # "To regenerate it, use `npm run generate-scaffolding`." +[//]: # "To regenerate it, use `python -m synthtool`." Google Cloud Platform logo # [Google BigQuery: Node.js Client](https://github.com/googleapis/nodejs-bigquery) -[![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) [![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery.svg)](https://www.npmjs.org/package/@google-cloud/bigquery) [![codecov](https://img.shields.io/codecov/c/github/googleapis/nodejs-bigquery/master.svg?style=flat)](https://codecov.io/gh/googleapis/nodejs-bigquery) -[BigQuery](https://cloud.google.com/bigquery/docs) is Google's fully managed, petabyte scale, low cost analytics data warehouse. BigQuery is NoOps—there is no infrastructure to manage and you don't need a database administrator—so you can focus on analyzing data to find meaningful insights, use familiar SQL, and take advantage of our pay-as-you-go model. -* [Using the client library](#using-the-client-library) + +Google BigQuery Client Library for Node.js + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-bigquery/blob/master/CHANGELOG.md). + +* [Google BigQuery Node.js Client API Reference][client-docs] +* [Google BigQuery Documentation][product-docs] +* [github.com/googleapis/nodejs-bigquery](https://github.com/googleapis/nodejs-bigquery) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + * [Before you begin](#before-you-begin) + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) * [Samples](#samples) * [Versioning](#versioning) * [Contributing](#contributing) * [License](#license) -## Using the client library - -1. [Select or create a Cloud Platform project][projects]. +## Quickstart -1. [Enable billing for your project][billing]. +### Before you begin +1. [Select or create a Cloud Platform project][projects]. 1. [Enable the Google BigQuery API][enable_api]. - 1. [Set up authentication with a service account][auth] so you can access the API from your local workstation. -1. Install the client library: +### Installing the client library + +```bash +npm install @google-cloud/bigquery +``` - npm install --save @google-cloud/bigquery -1. Try an example: +### Using the client library ```javascript - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); +// Imports the Google Cloud client library +const {BigQuery} = require('@google-cloud/bigquery'); +async function createDataset() { // Creates a client - const bigquery = new BigQuery(); + const bigqueryClient = new BigQuery(); // Create the dataset - const [dataset] = await bigquery.createDataset(datasetName); + const [dataset] = await bigqueryClient.createDataset(datasetName); console.log(`Dataset ${dataset.id} created.`); +} +createDataset(); + ``` + + ## Samples -Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery/tree/master/samples) directory. 
The samples' `README.md` -has instructions for running the samples. +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery/tree/master/samples) directory. Each sample's `README.md` has instructions for running its sample. | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | -| Dataset create | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createDataset.js,samples/README.md) | -| Dataset delete | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteDataset.js,samples/README.md) | -| Dataset list | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listDatasets.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listDatasets.js,samples/README.md) | -| Tables | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/tables.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/tables.js,samples/README.md) | -| Queries | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queries.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queries.js,samples/README.md) | - -The [BigQuery Node.js Client API Reference][client-docs] documentation +| Add Column Load Append | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/addColumnLoadAppend.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/addColumnLoadAppend.js,samples/README.md) | +| Add Column Query Append | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/addColumnQueryAppend.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/addColumnQueryAppend.js,samples/README.md) | +| Add Empty Column | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/addEmptyColumn.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/addEmptyColumn.js,samples/README.md) | +| Auth View Tutorial | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/authViewTutorial.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/authViewTutorial.js,samples/README.md) | +| Browse Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/browseTable.js) 
| [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/browseTable.js,samples/README.md) | +| Cancel Job | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/cancelJob.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/cancelJob.js,samples/README.md) | +| Client JSON Credentials | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/clientJSONCredentials.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/clientJSONCredentials.js,samples/README.md) | +| Copy Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/copyTable.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/copyTable.js,samples/README.md) | +| Copy Table Multiple Source | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/copyTableMultipleSource.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/copyTableMultipleSource.js,samples/README.md) | +| Create Dataset | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createDataset.js,samples/README.md) | +| Create Job | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createJob.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createJob.js,samples/README.md) | +| Create Model | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createModel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createModel.js,samples/README.md) | +| Create Routine | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createRoutine.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createRoutine.js,samples/README.md) | +| Create Routine DDL | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createRoutineDDL.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createRoutineDDL.js,samples/README.md) | +| Create Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createTable.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createTable.js,samples/README.md) | +| Create Table Partitioned | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createTablePartitioned.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createTablePartitioned.js,samples/README.md) | +| Create Table Range Partitioned | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createTableRangePartitioned.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createTableRangePartitioned.js,samples/README.md) | +| Create View | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createView.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createView.js,samples/README.md) | +| Ddl Create View | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/ddlCreateView.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/ddlCreateView.js,samples/README.md) | +| Delete Dataset | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteDataset.js,samples/README.md) | +| Delete Label Dataset | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteLabelDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteLabelDataset.js,samples/README.md) | +| Delete Label Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteLabelTable.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteLabelTable.js,samples/README.md) | +| Delete Model | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteModel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteModel.js,samples/README.md) | +| Delete Routine | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteRoutine.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteRoutine.js,samples/README.md) | +| Delete Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteTable.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteTable.js,samples/README.md) | +| Extract Table Compressed | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/extractTableCompressed.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/extractTableCompressed.js,samples/README.md) | +| Extract Table JSON | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/extractTableJSON.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/extractTableJSON.js,samples/README.md) | +| Extract Table To GCS | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/extractTableToGCS.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/extractTableToGCS.js,samples/README.md) | +| Get Dataset | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getDataset.js,samples/README.md) | +| Get Dataset Labels | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getDatasetLabels.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getDatasetLabels.js,samples/README.md) | +| Get Job | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getJob.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getJob.js,samples/README.md) | +| BigQuery Get Model | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getModel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getModel.js,samples/README.md) | +| Get Routine | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getRoutine.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getRoutine.js,samples/README.md) | +| BigQuery Get Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getTable.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getTable.js,samples/README.md) | +| Get Table Labels | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getTableLabels.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getTableLabels.js,samples/README.md) | +| Get View | 
[source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getView.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getView.js,samples/README.md) | +| Insert Rows As Stream | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/insertRowsAsStream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/insertRowsAsStream.js,samples/README.md) | +| Inserting Data Types | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/insertingDataTypes.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/insertingDataTypes.js,samples/README.md) | +| BigQuery Label Dataset | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/labelDataset.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/labelDataset.js,samples/README.md) | +| Label Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/labelTable.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/labelTable.js,samples/README.md) | +| List Datasets | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listDatasets.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listDatasets.js,samples/README.md) | +| List Datasets By Label | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listDatasetsByLabel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listDatasetsByLabel.js,samples/README.md) | +| List Jobs | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listJobs.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listJobs.js,samples/README.md) | +| BigQuery List Models | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listModels.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listModels.js,samples/README.md) | +| BigQuery List Models Streaming | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listModelsStreaming.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listModelsStreaming.js,samples/README.md) | +| List Routines | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listRoutines.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listRoutines.js,samples/README.md) | +| List Tables | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listTables.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listTables.js,samples/README.md) | +| Load CSV From GCS | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadCSVFromGCS.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadCSVFromGCS.js,samples/README.md) | +| Load CSV From GCS Autodetect | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadCSVFromGCSAutodetect.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadCSVFromGCSAutodetect.js,samples/README.md) | +| Load CSV From GCS Truncate | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadCSVFromGCSTruncate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadCSVFromGCSTruncate.js,samples/README.md) | +| Load JSON From GCS | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadJSONFromGCS.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadJSONFromGCS.js,samples/README.md) | +| Load JSON From GCS Autodetect | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadJSONFromGCSAutodetect.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadJSONFromGCSAutodetect.js,samples/README.md) | +| Load JSON From GCS Truncate | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadJSONFromGCSTruncate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadJSONFromGCSTruncate.js,samples/README.md) | +| Load Local File | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadLocalFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadLocalFile.js,samples/README.md) | +| Load Orc From GCS Truncate | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadOrcFromGCSTruncate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadOrcFromGCSTruncate.js,samples/README.md) | +| Load Parquet From GCS Truncate | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadParquetFromGCSTruncate.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadParquetFromGCSTruncate.js,samples/README.md) | +| Load Table GCS Avro | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSAvro.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSAvro.js,samples/README.md) | +| Load Table GCS Avro Truncate | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSAvroTruncate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSAvroTruncate.js,samples/README.md) | +| Load Table GCSORC | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSORC.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSORC.js,samples/README.md) | +| Load Table GCS Parquet | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSParquet.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSParquet.js,samples/README.md) | +| Load Table Partitioned | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTablePartitioned.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTablePartitioned.js,samples/README.md) | +| Nested Repeated Schema | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/nestedRepeatedSchema.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/nestedRepeatedSchema.js,samples/README.md) | +| Query | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/query.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/query.js,samples/README.md) | +| Query Batch | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryBatch.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryBatch.js,samples/README.md) | +| Query Destination Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryDestinationTable.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryDestinationTable.js,samples/README.md) | +| Query Disable Cache | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryDisableCache.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryDisableCache.js,samples/README.md) | +| Query Dry Run | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryDryRun.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryDryRun.js,samples/README.md) | +| Query External GCS Perm | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryExternalGCSPerm.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryExternalGCSPerm.js,samples/README.md) | +| Query Legacy | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryLegacy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryLegacy.js,samples/README.md) | +| Query Legacy Large Results | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryLegacyLargeResults.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryLegacyLargeResults.js,samples/README.md) | +| Query Pagination | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryPagination.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryPagination.js,samples/README.md) | +| Query Params Arrays | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsArrays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsArrays.js,samples/README.md) | +| Query Params Named | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsNamed.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsNamed.js,samples/README.md) | +| Query Params Named Types | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsNamedTypes.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsNamedTypes.js,samples/README.md) | +| Query Params Positional | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsPositional.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsPositional.js,samples/README.md) | +| Query Params Positional Types | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsPositionalTypes.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsPositionalTypes.js,samples/README.md) | +| Query Params Structs | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsStructs.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsStructs.js,samples/README.md) | +| Query Params Timestamps | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsTimestamps.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsTimestamps.js,samples/README.md) | +| Query Stack Overflow | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryStackOverflow.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryStackOverflow.js,samples/README.md) | +| Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| Relax Column | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/relaxColumn.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/relaxColumn.js,samples/README.md) | +| Relax Column Load Append | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/relaxColumnLoadAppend.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/relaxColumnLoadAppend.js,samples/README.md) | +| Relax Column Query Append | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/relaxColumnQueryAppend.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/relaxColumnQueryAppend.js,samples/README.md) | +| Set User Agent | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/setUserAgent.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/setUserAgent.js,samples/README.md) | +| Undelete Table | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/undeleteTable.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/undeleteTable.js,samples/README.md) | +| Update Dataset Access | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateDatasetAccess.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateDatasetAccess.js,samples/README.md) | +| Update Dataset Description | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateDatasetDescription.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateDatasetDescription.js,samples/README.md) | +| Update Dataset Expiration | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateDatasetExpiration.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateDatasetExpiration.js,samples/README.md) | +| BigQuery Update Model | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateModel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateModel.js,samples/README.md) | +| Update Routine | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateRoutine.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateRoutine.js,samples/README.md) | +| Update Table Description | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateTableDescription.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateTableDescription.js,samples/README.md) | +| Update Table Expiration | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateTableExpiration.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateTableExpiration.js,samples/README.md) | +| Update View Query | [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateViewQuery.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateViewQuery.js,samples/README.md) | + + + +The [Google BigQuery Node.js Client API Reference][client-docs] documentation also contains samples. +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed via npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. + +_Legacy Node.js versions are supported as a best effort:_ + +* Legacy versions will not be tested in continuous integration. +* Some security patches may not be able to be backported. +* Dependencies will not be kept up-to-date, and features will not be backported. 
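For example, to install the newest release published under one of these legacy dist-tags (the `legacy-8` tag listed in the next section), run:

```
npm install @google-cloud/bigquery@legacy-8
```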
+ +#### Legacy tags available + +* `legacy-8`: install client libraries from this dist-tag for versions + compatible with Node.js 8. + ## Versioning This library follows [Semantic Versioning](http://semver.org/). + This library is considered to be **General Availability (GA)**. This means it is stable; the code surface will not change in backwards-incompatible ways unless absolutely necessary (e.g. because of critical security issues) or with an extensive deprecation period. Issues and requests against **GA** libraries are addressed with the highest priority. + + + + More Information: [Google Cloud Platform Launch Stages][launch_stages] [launch_stages]: https://cloud.google.com/terms/launch-stages @@ -80,27 +223,22 @@ More Information: [Google Cloud Platform Launch Stages][launch_stages] Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery/blob/master/CONTRIBUTING.md). +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its template in this +[directory](https://github.com/googleapis/synthtool/tree/master/synthtool/gcp/templates/node_library). + ## License Apache Version 2.0 See [LICENSE](https://github.com/googleapis/nodejs-bigquery/blob/master/LICENSE) -## What's Next - -* [BigQuery Documentation][product-docs] -* [BigQuery Node.js Client API Reference][client-docs] -* [github.com/googleapis/nodejs-bigquery](https://github.com/googleapis/nodejs-bigquery) - -Read more about the client libraries for Cloud APIs, including the older -Google APIs Client Libraries, in [Client Libraries Explained][explained]. - -[explained]: https://cloud.google.com/apis/docs/client-libraries-explained - -[client-docs]: https://cloud.google.com/nodejs/docs/reference/bigquery/latest/ -[product-docs]: https://cloud.google.com/bigquery/docs +[client-docs]: https://googleapis.dev/nodejs/bigquery/latest +[product-docs]: https://cloud.google.com/bigquery [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png [projects]: https://console.cloud.google.com/project [billing]: https://support.google.com/cloud/answer/6293499#enable-billing -[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquery-json.googleapis.com +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquery.googleapis.com [auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..8b58ae9c --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/api-extractor.json b/api-extractor.json new file mode 100644 index 00000000..de228294 --- /dev/null +++ b/api-extractor.json @@ -0,0 +1,369 @@ +/** + * Config file for API Extractor. For more info, please visit: https://api-extractor.com + */ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + + /** + * Optionally specifies another JSON config file that this file extends from. This provides a way for + * standard settings to be shared across multiple projects. 
+ * + * If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains + * the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be + * resolved using NodeJS require(). + * + * SUPPORTED TOKENS: none + * DEFAULT VALUE: "" + */ + // "extends": "./shared/api-extractor-base.json" + // "extends": "my-package/include/api-extractor-base.json" + + /** + * Determines the "<projectFolder>" token that can be used with other config file settings. The project folder + * typically contains the tsconfig.json and package.json config files, but the path is user-defined. + * + * The path is resolved relative to the folder of the config file that contains the setting. + * + * The default value for "projectFolder" is the token "<lookup>", which means the folder is determined by traversing + * parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder + * that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error + * will be reported. + * + * SUPPORTED TOKENS: <lookup> + * DEFAULT VALUE: "<lookup>" + */ + // "projectFolder": "..", + + /** + * (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor + * analyzes the symbols exported by this module. + * + * The file extension must be ".d.ts" and not ".ts". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + */ + "mainEntryPointFilePath": "<projectFolder>/protos/protos.d.ts", + + /** + * A list of NPM package names whose exports should be treated as part of this package. + * + * For example, suppose that Webpack is used to generate a distributed bundle for the project "library1", + * and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part + * of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly + * imports library2. To avoid this, we can specify: + * + * "bundledPackages": [ "library2" ], + * + * This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been + * local files for library1. + */ + "bundledPackages": [ ], + + /** + * Determines how the TypeScript compiler engine will be invoked by API Extractor. + */ + "compiler": { + /** + * Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * Note: This setting will be ignored if "overrideTsconfig" is used. + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<projectFolder>/tsconfig.json" + */ + // "tsconfigFilePath": "<projectFolder>/tsconfig.json", + + /** + * Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk. + * The object must conform to the TypeScript tsconfig schema: + * + * http://json.schemastore.org/tsconfig + * + * If omitted, then the tsconfig.json file will be read from the "projectFolder". + * + * DEFAULT VALUE: no overrideTsconfig section + */ + // "overrideTsconfig": { + // . . . + // } + + /** + * This option causes the compiler to be invoked with the --skipLibCheck option.
This option is not recommended + * and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when + * dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses + * for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck. + * + * DEFAULT VALUE: false + */ + // "skipLibCheck": true, + }, + + /** + * Configures how the API report file (*.api.md) will be generated. + */ + "apiReport": { + /** + * (REQUIRED) Whether to generate an API report. + */ + "enabled": true, + + /** + * The filename for the API report files. It will be combined with "reportFolder" or "reportTempFolder" to produce + * a full file path. + * + * The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/". + * + * SUPPORTED TOKENS: <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<unscopedPackageName>.api.md" + */ + // "reportFileName": "<unscopedPackageName>.api.md", + + /** + * Specifies the folder where the API report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy, + * e.g. for an API review. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<projectFolder>/etc/" + */ + // "reportFolder": "<projectFolder>/etc/", + + /** + * Specifies the folder where the temporary report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * After the temporary file is written to disk, it is compared with the file in the "reportFolder". + * If they are different, a production build will fail. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<projectFolder>/temp/" + */ + // "reportTempFolder": "<projectFolder>/temp/" + }, + + /** + * Configures how the doc model file (*.api.json) will be generated. + */ + "docModel": { + /** + * (REQUIRED) Whether to generate a doc model file. + */ + "enabled": true, + + /** + * The output path for the doc model file. The file extension should be ".api.json". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<projectFolder>/temp/<unscopedPackageName>.api.json" + */ + // "apiJsonFilePath": "<projectFolder>/temp/<unscopedPackageName>.api.json" + }, + + /** + * Configures how the .d.ts rollup file will be generated. + */ + "dtsRollup": { + /** + * (REQUIRED) Whether to generate the .d.ts rollup file. + */ + "enabled": true, + + /** + * Specifies the output path for a .d.ts rollup file to be generated without any trimming. + * This file will include all declarations that are exported by the main entry point. + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<projectFolder>/dist/<unscopedPackageName>.d.ts" + */ + // "untrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>.d.ts", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release. + * This file will include only declarations that are marked as "@public" or "@beta".
+ * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "" + */ + // "betaTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-beta.d.ts", + + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release. + * This file will include only declarations that are marked as "@public". + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "" + */ + // "publicTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-public.d.ts", + + /** + * When a declaration is trimmed, by default it will be replaced by a code comment such as + * "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the + * declaration completely. + * + * DEFAULT VALUE: false + */ + // "omitTrimmingComments": true + }, + + /** + * Configures how the tsdoc-metadata.json file will be generated. + */ + "tsdocMetadata": { + /** + * Whether to generate the tsdoc-metadata.json file. + * + * DEFAULT VALUE: true + */ + // "enabled": true, + + /** + * Specifies where the TSDoc metadata file should be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "<projectFolder>". + * + * The default value is "<lookup>", which causes the path to be automatically inferred from the "tsdocMetadata", + * "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup + * falls back to "tsdoc-metadata.json" in the package folder. + * + * SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName> + * DEFAULT VALUE: "<lookup>" + */ + // "tsdocMetadataFilePath": "<projectFolder>/dist/tsdoc-metadata.json" + }, + + /** + * Specifies what type of newlines API Extractor should use when writing output files. By default, the output files + * will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead. + * To use the OS's default newline kind, specify "os". + * + * DEFAULT VALUE: "crlf" + */ + // "newlineKind": "crlf", + + /** + * Configures how API Extractor reports error and warning messages produced during analysis. + * + * There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages. + */ + "messages": { + /** + * Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing + * the input .d.ts files. + * + * TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. + */ + "compilerMessageReporting": { + /** + * Configures the default routing for messages that don't match an explicit rule in this table. + */ + "default": { + /** + * Specifies whether the message should be written to the tool's output log. Note that + * the "addToApiReportFile" property may supersede this option. + * + * Possible values: "error", "warning", "none" + * + * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build to fail + * and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes + * the "--local" option), the warning is displayed but the build will not fail.
+ * + * DEFAULT VALUE: "warning" + */ + "logLevel": "warning", + + /** + * When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md), + * then the message will be written inside that file; otherwise, the message is instead logged according to + * the "logLevel" option. + * + * DEFAULT VALUE: false + */ + // "addToApiReportFile": false + }, + + // "TS2551": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + }, + + /** + * Configures handling of messages reported by API Extractor during its analysis. + * + * API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag" + * + * DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings + */ + "extractorMessageReporting": { + "default": { + "logLevel": "warning", + // "addToApiReportFile": false + }, + + // "ae-extra-release-tag": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + }, + + /** + * Configures handling of messages reported by the TSDoc parser when analyzing code comments. + * + * TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. + */ + "tsdocMessageReporting": { + "default": { + "logLevel": "warning", + // "addToApiReportFile": false + } + + // "tsdoc-link-tag-unescaped-text": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + } + } + +} diff --git a/benchmark/bench.ts b/benchmark/bench.ts index e4402ae0..108c455b 100644 --- a/benchmark/bench.ts +++ b/benchmark/bench.ts @@ -1,26 +1,25 @@ -/*! - * Copyright 2017 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
import * as fs from 'fs'; import {BigQuery} from '../src'; if (process.argv.length < 3) { throw new Error( - `need query file; ` + - `usage: '${process.argv[0]} ${process.argv[1]} '`); + 'need query file; ' + + `usage: '${process.argv[0]} ${process.argv[1]} '` + ); } console.log('query,rows,cols,first_byte,total'); @@ -29,11 +28,11 @@ const queryJson = fs.readFileSync(process.argv[2], 'utf8'); const queries = JSON.parse(queryJson); const client = new BigQuery(); -Promise - .all(queries.map((query: string) => { - return doQuery(query).catch(console.error); - })) - .catch(console.error); +Promise.all( + queries.map((query: string) => { + return doQuery(query).catch(console.error); + }) +).catch(console.error); async function doQuery(queryTxt: string) { return new Promise((resolve, reject) => { @@ -43,30 +42,31 @@ async function doQuery(queryTxt: string) { let timeFirstByteMilli: number; const query = {query: queryTxt, useLegacySql: false}; - const stream = - client.createQueryStream(query) - .on('error', reject) - .on('data', - row => { - if (numRows === 0) { - numCols = Object.keys(row).length; - timeFirstByteMilli = new Date().getTime() - startMilli; - } else if (numCols !== Object.keys(row).length) { - stream.end(); - const receivedCols = Object.keys(row).length; - const error = new Error( - `query "${queryTxt}": ` + - `wrong number of columns, want ${numCols} got ${ - receivedCols}`); - reject(error); - } - numRows++; - }) - .on('end', () => { - const timeTotalMilli = new Date().getTime() - startMilli; - console.log(`"${queryTxt}",${numRows},${numCols},${ - timeFirstByteMilli / 1000},${timeTotalMilli / 1000}`); - resolve(); - }); + const stream = client + .createQueryStream(query) + .on('error', reject) + .on('data', row => { + if (numRows === 0) { + numCols = Object.keys(row).length; + timeFirstByteMilli = new Date().getTime() - startMilli; + } else if (numCols !== Object.keys(row).length) { + stream.end(); + const receivedCols = Object.keys(row).length; + const error = new Error( + `query "${queryTxt}": ` + + `wrong number of columns, want ${numCols} got ${receivedCols}` + ); + reject(error); + } + numRows++; + }) + .on('end', () => { + const timeTotalMilli = new Date().getTime() - startMilli; + console.log( + `"${queryTxt}",${numRows},${numCols},${timeFirstByteMilli / + 1000},${timeTotalMilli / 1000}` + ); + resolve(); + }); }); } diff --git a/codecov.yaml b/codecov.yaml deleted file mode 100644 index 5724ea94..00000000 --- a/codecov.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -codecov: - ci: - - source.cloud.google.com diff --git a/linkinator.config.json b/linkinator.config.json new file mode 100644 index 00000000..72a21eea --- /dev/null +++ b/linkinator.config.json @@ -0,0 +1,11 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "http://goo.gl/f2SXcb", + "img.shields.io" + ], + "silent": true, + "concurrency": 10 +} diff --git a/package.json b/package.json index 23700859..6323a772 100644 --- a/package.json +++ b/package.json @@ -1,11 +1,11 @@ { "name": "@google-cloud/bigquery", "description": "Google BigQuery Client Library for Node.js", - "version": "2.1.0", + "version": "5.7.0", "license": "Apache-2.0", "author": "Google LLC", "engines": { - "node": ">=6.0.0" + "node": ">=10" }, "repository": "googleapis/nodejs-bigquery", "main": "./build/src/index.js", @@ -30,75 +30,65 @@ "prebenchmark": "npm run compile", "benchmark": "node build/benchmark/bench.js benchmark/queries.json", "docs": "jsdoc -c .jsdoc.js", - "lint": "gts check && eslint 
samples/", + "lint": "gts check", "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", - "test": "nyc mocha build/test", + "test": "c8 mocha build/test", "system-test": "mocha build/system-test --timeout 600000", "presystem-test": "npm run compile", - "check": "gts check", "clean": "gts clean", - "compile": "tsc -p .", - "fix": "gts fix && eslint --fix '**/*.js'", + "compile": "tsc -p . && cp src/types.d.ts build/src/", + "fix": "gts fix", "predocs": "npm run compile", "prepare": "npm run compile", "pretest": "npm run compile", - "posttest": "npm run check", - "docs-test": "linkinator docs -r --skip www.googleapis.com", + "docs-test": "linkinator docs", "predocs-test": "npm run docs", - "generate-scaffolding": "repo-tools generate all && repo-tools generate lib_samples_readme -l samples/ --config ../.cloud-repo-tools.json" + "types": "dtsd bigquery v2 > ./src/types.d.ts", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" }, "dependencies": { - "@google-cloud/common": "^0.31.0", - "@google-cloud/paginator": "^0.2.0", - "@google-cloud/promisify": "^0.4.0", - "arrify": "^1.0.0", - "big.js": "^5.1.2", + "@google-cloud/common": "^3.1.0", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "big.js": "^6.0.0", "duplexify": "^4.0.0", - "extend": "^3.0.1", - "is": "^3.0.1", - "stream-events": "^1.0.1", - "string-format-obj": "^1.0.0", - "teeny-request": "^3.11.0", - "uuid": "^3.1.0" + "extend": "^3.0.2", + "is": "^3.3.0", + "p-event": "^4.1.0", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" }, "devDependencies": { - "@google-cloud/nodejs-repo-tools": "^3.0.0", - "@google-cloud/storage": "^2.0.0", - "@types/arrify": "^1.0.4", - "@types/big.js": "^4.0.5", + "@google-cloud/storage": "^5.0.0", + "@types/big.js": "^6.0.0", "@types/execa": "^0.9.0", - "@types/extend": "^3.0.0", + "@types/extend": "^3.0.1", "@types/is": "0.0.21", - "@types/mocha": "^5.2.5", + "@types/mocha": "^8.0.0", "@types/mv": "^2.1.0", "@types/ncp": "^2.0.1", + "@types/node": "^14.0.0", "@types/proxyquire": "^1.3.28", - "@types/request": "^2.48.0", - "@types/sinon": "^7.0.0", - "@types/tmp": "0.0.34", - "@types/uuid": "^3.4.4", - "codecov": "^3.0.0", - "eslint": "^5.0.0", - "eslint-config-prettier": "^4.0.0", - "eslint-plugin-node": "^8.0.0", - "eslint-plugin-prettier": "^3.0.0", - "execa": "^1.0.0", - "gts": "^0.9.0", - "ink-docstrap": "^1.3.2", - "intelli-espower-loader": "^1.0.1", - "jsdoc": "^3.5.5", - "mocha": "^6.0.0", + "@types/sinon": "^10.0.0", + "@types/tmp": "0.2.1", + "@types/uuid": "^8.0.0", + "c8": "^7.0.0", + "codecov": "^3.5.0", + "discovery-tsd": "^0.2.0", + "execa": "^5.0.0", + "gts": "^2.0.0", + "jsdoc": "^3.6.3", + "jsdoc-fresh": "^1.0.1", + "jsdoc-region-tag": "^1.0.2", + "linkinator": "^2.0.0", + "mocha": "^8.0.0", "mv": "^2.1.1", "ncp": "^2.0.0", - "nyc": "^13.0.0", - "power-assert": "^1.4.4", - "prettier": "^1.13.2", - "proxyquire": "^2.0.0", - "sinon": "^7.0.0", - "source-map-support": "^0.5.6", - "tmp": "0.0.33", - "typescript": "~3.3.0", - "jsdoc-baseline": "git+https://github.com/hegemonic/jsdoc-baseline.git", - "linkinator": "^1.1.2" + "proxyquire": "^2.1.0", + "sinon": "^11.0.0", + "tmp": "0.2.1", + "typescript": "^3.8.3" } } diff --git a/renovate.json b/renovate.json index 61f31b77..9518bf36 100644 --- a/renovate.json +++ b/renovate.json @@ -14,5 +14,6 @@ "extends": "packages:linters", "groupName": "linters" } - ] + ], + "ignoreDeps": ["typescript"] } diff --git a/samples/README.md b/samples/README.md 
index a970b9e7..2f67121b 100644 --- a/samples/README.md +++ b/samples/README.md @@ -1,158 +1,1726 @@ [//]: # "This README.md file is auto-generated, all changes to this file will be lost." -[//]: # "To regenerate it, use `npm run generate-scaffolding`." +[//]: # "To regenerate it, use `python -m synthtool`." Google Cloud Platform logo -# Google BigQuery: Node.js Samples +# [Google BigQuery: Node.js Samples](https://github.com/googleapis/nodejs-bigquery) [![Open in Cloud Shell][shell_img]][shell_link] -[BigQuery](https://cloud.google.com/bigquery/docs) is Google's fully managed, petabyte scale, low cost analytics data warehouse. BigQuery is NoOps—there is no infrastructure to manage and you don't need a database administrator—so you can focus on analyzing data to find meaningful insights, use familiar SQL, and take advantage of our pay-as-you-go model. + ## Table of Contents * [Before you begin](#before-you-begin) * [Samples](#samples) - * [Dataset create](#dataset-create) - * [Dataset delete](#dataset-delete) - * [Dataset list](#dataset-list) - * [Tables](#tables) - * [Queries](#queries) + * [Add Column Load Append](#add-column-load-append) + * [Add Column Query Append](#add-column-query-append) + * [Add Empty Column](#add-empty-column) + * [Auth View Tutorial](#auth-view-tutorial) + * [Browse Table](#browse-table) + * [Cancel Job](#cancel-job) + * [Client JSON Credentials](#client-json-credentials) + * [Copy Table](#copy-table) + * [Copy Table Multiple Source](#copy-table-multiple-source) + * [Create Dataset](#create-dataset) + * [Create Job](#create-job) + * [Create Model](#create-model) + * [Create Routine](#create-routine) + * [Create Routine DDL](#create-routine-ddl) + * [Create Table](#create-table) + * [Create Table Partitioned](#create-table-partitioned) + * [Create Table Range Partitioned](#create-table-range-partitioned) + * [Create View](#create-view) + * [Ddl Create View](#ddl-create-view) + * [Delete Dataset](#delete-dataset) + * [Delete Label Dataset](#delete-label-dataset) + * [Delete Label Table](#delete-label-table) + * [Delete Model](#delete-model) + * [Delete Routine](#delete-routine) + * [Delete Table](#delete-table) + * [Extract Table Compressed](#extract-table-compressed) + * [Extract Table JSON](#extract-table-json) + * [Extract Table To GCS](#extract-table-to-gcs) + * [Get Dataset](#get-dataset) + * [Get Dataset Labels](#get-dataset-labels) + * [Get Job](#get-job) + * [BigQuery Get Model](#bigquery-get-model) + * [Get Routine](#get-routine) + * [BigQuery Get Table](#bigquery-get-table) + * [Get Table Labels](#get-table-labels) + * [Get View](#get-view) + * [Insert Rows As Stream](#insert-rows-as-stream) + * [Inserting Data Types](#inserting-data-types) + * [BigQuery Label Dataset](#bigquery-label-dataset) + * [Label Table](#label-table) + * [List Datasets](#list-datasets) + * [List Datasets By Label](#list-datasets-by-label) + * [List Jobs](#list-jobs) + * [BigQuery List Models](#bigquery-list-models) + * [BigQuery List Models Streaming](#bigquery-list-models-streaming) + * [List Routines](#list-routines) + * [List Tables](#list-tables) + * [Load CSV From GCS](#load-csv-from-gcs) + * [Load CSV From GCS Autodetect](#load-csv-from-gcs-autodetect) + * [Load CSV From GCS Truncate](#load-csv-from-gcs-truncate) + * [Load JSON From GCS](#load-json-from-gcs) + * [Load JSON From GCS Autodetect](#load-json-from-gcs-autodetect) + * [Load JSON From GCS Truncate](#load-json-from-gcs-truncate) + * [Load Local File](#load-local-file) + * [Load Orc From GCS 
Truncate](#load-orc-from-gcs-truncate) + * [Load Parquet From GCS Truncate](#load-parquet-from-gcs-truncate) + * [Load Table GCS Avro](#load-table-gcs-avro) + * [Load Table GCS Avro Truncate](#load-table-gcs-avro-truncate) + * [Load Table GCSORC](#load-table-gcsorc) + * [Load Table GCS Parquet](#load-table-gcs-parquet) + * [Load Table Partitioned](#load-table-partitioned) + * [Nested Repeated Schema](#nested-repeated-schema) + * [Query](#query) + * [Query Batch](#query-batch) + * [Query Destination Table](#query-destination-table) + * [Query Disable Cache](#query-disable-cache) + * [Query Dry Run](#query-dry-run) + * [Query External GCS Perm](#query-external-gcs-perm) + * [Query Legacy](#query-legacy) + * [Query Legacy Large Results](#query-legacy-large-results) + * [Query Pagination](#query-pagination) + * [Query Params Arrays](#query-params-arrays) + * [Query Params Named](#query-params-named) + * [Query Params Named Types](#query-params-named-types) + * [Query Params Positional](#query-params-positional) + * [Query Params Positional Types](#query-params-positional-types) + * [Query Params Structs](#query-params-structs) + * [Query Params Timestamps](#query-params-timestamps) + * [Query Stack Overflow](#query-stack-overflow) + * [Quickstart](#quickstart) + * [Relax Column](#relax-column) + * [Relax Column Load Append](#relax-column-load-append) + * [Relax Column Query Append](#relax-column-query-append) + * [Set User Agent](#set-user-agent) + * [Undelete Table](#undelete-table) + * [Update Dataset Access](#update-dataset-access) + * [Update Dataset Description](#update-dataset-description) + * [Update Dataset Expiration](#update-dataset-expiration) + * [BigQuery Update Model](#bigquery-update-model) + * [Update Routine](#update-routine) + * [Update Table Description](#update-table-description) + * [Update Table Expiration](#update-table-expiration) + * [Update View Query](#update-view-query) ## Before you begin -Before running the samples, make sure you've followed the steps in the -[Before you begin section](../README.md#before-you-begin) of the client -library's README. +Before running the samples, make sure you've followed the steps outlined in +[Using the client library](https://github.com/googleapis/nodejs-bigquery#using-the-client-library). + +`cd samples` + +`npm install` + +`cd ..` ## Samples -### Dataset create - -View the [source code][dataset-create_0_code]. - -[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createDataset.js,samples/README.md)[dataset-create_0_docs]: https://cloud.google.com/nodejs/docs/reference/bigquery/latest/ -[dataset-create_0_code]: createDataset.js - -### Dataset delete - -View the [source code][dataset-delete_1_code]. - -[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteDataset.js,samples/README.md)[dataset-delete_1_docs]: https://cloud.google.com/nodejs/docs/reference/bigquery/latest/ -[dataset-delete_1_code]: deleteDataset.js - -### Dataset list - -View the [source code][dataset-list_2_code]. 
- -[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listDatasets.js,samples/README.md)[dataset-list_2_docs]: https://cloud.google.com/nodejs/docs/reference/bigquery/latest/ -[dataset-list_2_code]: listDatasets.js - -### Tables - -View the [source code][tables_3_code]. - -[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/tables.js,samples/README.md) - -__Usage:__ `node tables.js --help` - -``` -tables.js - -Commands: - tables.js create Creates a new table. - tables.js list Lists all tables in a dataset. - tables.js delete Deletes a table. - tables.js copy Makes a copy of a table. - - tables.js browse Lists rows in a table. - tables.js load-local-csv Loads data from a local file into a table. - - tables.js load-gcs-orc Loads sample ORC data from a Google Cloud Storage file - into a table. - tables.js load-gcs-parquet Loads sample Parquet data from a Google Cloud Storage - file into a table. - tables.js load-gcs-csv Loads sample CSV data from a Google Cloud Storage file - into a table. - tables.js load-gcs-json Loads sample JSON data from a Google Cloud Storage file - into a table. - tables.js load-gcs-csv-autodetect Loads sample CSV data from a Google Cloud Storage file - into a table. - tables.js load-gcs-json-autodetect Loads sample JSON data from a Google Cloud Storage file - into a table. - tables.js load-gcs-csv-truncate Loads sample CSV data from GCS, replacing an existing - table. - tables.js load-gcs-json-truncate Loads sample JSON data from GCS, replacing an existing - table. - tables.js load-gcs-parquet-truncate Loads sample Parquet data from GCS, replacing an - existing table. - tables.js load-gcs-orc-truncate Loads sample Orc data from GCS, replacing an existing - table. - tables.js extract Extract a table from BigQuery to Google Cloud Storage. - - tables.js insert Insert a JSON array (as a string or newline-delimited - file) into a BigQuery table. - -Options: - --version Show version number [boolean] - --help Show help [boolean] - -Examples: - node tables.js create my-project-id my_dataset my_table Creates a new table named "my_table" in "my_dataset". - "Name:string, Age:integer, Weight:float, IsMagic:boolean" - node tables.js list my-project-id my_dataset Lists tables in "my_dataset". - node tables.js browse my-project-id my_dataset my_table Displays rows from "my_table" in "my_dataset". - node tables.js delete my-project-id my_dataset my_table Deletes "my_table" from "my_dataset". - node tables.js load my-project-id my_dataset my_table Imports a local file into a table. - ./data.csv - node tables.js load-gcs my-project-id my_dataset my_table Imports a GCS file into a table. - my-bucket data.csv - node tables.js extract my-project-id my_dataset my_table Exports my_dataset:my_table to gcs://my-bucket/my-file - my-bucket my-file as raw CSV. - node tables.js extract my-project-id my_dataset my_table Exports my_dataset:my_table to gcs://my-bucket/my-file - my-bucket my-file -f JSON --gzip as gzipped JSON. - node tables.js insert my-project-id my_dataset my_table Inserts the JSON array represented by json_string into - json_string my_dataset:my_table. - node tables.js insert my-project-id my_dataset my_table Inserts the JSON objects contained in json_file (one per - json_file line) into my_dataset:my_table. 
- node tables.js copy my-project-id src_dataset src_table Copies src_dataset:src_table to dest_dataset:dest_table. - dest_dataset dest_table - -For more information, see https://cloud.google.com/bigquery/docs -``` - -[tables_3_docs]: https://cloud.google.com/nodejs/docs/reference/bigquery/latest/ -[tables_3_code]: tables.js - -### Queries - -View the [source code][queries_4_code]. - -[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queries.js,samples/README.md) - -__Usage:__ `node queries.js --help` - -``` -queries.js - -Commands: - queries.js stackoverflow Queries a public Stack Overflow dataset. - queries.js query Queries the US Names dataset. - queries.js disable-cache Queries the Shakespeare dataset with the cache disabled. - -Options: - --version Show version number [boolean] - --help Show help [boolean] - -Examples: - node queries.js stackoverflow Queries a public Stackoverflow dataset. - node queries.js query Queries the US Names dataset. - node queries.js disable-cache Queries the Shakespeare dataset with the cache disabled. - -For more information, see https://cloud.google.com/bigquery/docs -``` - -[queries_4_docs]: https://cloud.google.com/nodejs/docs/reference/bigquery/latest/ -[queries_4_code]: queries.js + + +### Add Column Load Append + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/addColumnLoadAppend.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/addColumnLoadAppend.js,samples/README.md) + +__Usage:__ + + +`node samples/addColumnLoadAppend.js` + + +----- + + + + +### Add Column Query Append + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/addColumnQueryAppend.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/addColumnQueryAppend.js,samples/README.md) + +__Usage:__ + + +`node samples/addColumnQueryAppend.js` + + +----- + + + + +### Add Empty Column + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/addEmptyColumn.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/addEmptyColumn.js,samples/README.md) + +__Usage:__ + + +`node samples/addEmptyColumn.js` + + +----- + + + + +### Auth View Tutorial + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/authViewTutorial.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/authViewTutorial.js,samples/README.md) + +__Usage:__ + + +`node samples/authViewTutorial.js` + + +----- + + + + +### Browse Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/browseTable.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/browseTable.js,samples/README.md) + +__Usage:__ + + +`node samples/browseTable.js` + + +----- + + + + +### Cancel Job + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/cancelJob.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/cancelJob.js,samples/README.md) + +__Usage:__ + + +`node samples/cancelJob.js` + + +----- + + + + +### Client JSON Credentials + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/clientJSONCredentials.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/clientJSONCredentials.js,samples/README.md) + +__Usage:__ + + +`node samples/clientJSONCredentials.js` + + +----- + + + + +### Copy Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/copyTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/copyTable.js,samples/README.md) + +__Usage:__ + + +`node samples/copyTable.js` + + +----- + + + + +### Copy Table Multiple Source + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/copyTableMultipleSource.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/copyTableMultipleSource.js,samples/README.md) + +__Usage:__ + + +`node samples/copyTableMultipleSource.js` + + +----- + + + + +### Create Dataset + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createDataset.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createDataset.js,samples/README.md) + +__Usage:__ + + +`node samples/createDataset.js` + + +----- + + + + +### Create Job + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createJob.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createJob.js,samples/README.md) + +__Usage:__ + + +`node samples/createJob.js` + + +----- + + + + +### Create Model + +Creates a model in a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createModel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createModel.js,samples/README.md) + +__Usage:__ + + +`node createModel.js ` + + +----- + + + + +### Create Routine + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createRoutine.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createRoutine.js,samples/README.md) + +__Usage:__ + + +`node samples/createRoutine.js` + + +----- + + + + +### Create Routine DDL + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createRoutineDDL.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createRoutineDDL.js,samples/README.md) + +__Usage:__ + + +`node samples/createRoutineDDL.js` + + +----- + + + + +### Create Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createTable.js,samples/README.md) + +__Usage:__ + + +`node samples/createTable.js` + + +----- + + + + +### Create Table Partitioned + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createTablePartitioned.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createTablePartitioned.js,samples/README.md) + +__Usage:__ + + +`node samples/createTablePartitioned.js` + + +----- + + + + +### Create Table Range Partitioned + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createTableRangePartitioned.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createTableRangePartitioned.js,samples/README.md) + +__Usage:__ + + +`node samples/createTableRangePartitioned.js` + + +----- + + + + +### Create View + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/createView.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/createView.js,samples/README.md) + +__Usage:__ + + +`node samples/createView.js` + + +----- + + + + +### Ddl Create View + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/ddlCreateView.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/ddlCreateView.js,samples/README.md) + +__Usage:__ + + +`node samples/ddlCreateView.js` + + +----- + + + + +### Delete Dataset + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteDataset.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteDataset.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteDataset.js` + + +----- + + + + +### Delete Label Dataset + +Deletes a label on a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteLabelDataset.js). 
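
Roughly, the sample patches the dataset metadata so the label's value is `null`, which asks the API to drop it. A hedged sketch, with placeholder IDs and assuming a `color` label already exists:

```
// Sketch: remove the "color" label from a dataset (placeholder IDs).
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function deleteLabelDataset() {
  const dataset = bigquery.dataset('my_dataset');
  const [metadata] = await dataset.getMetadata();
  // A null value in the patch tells the API to delete that label.
  metadata.labels = {color: null};
  const [apiResponse] = await dataset.setMetadata(metadata);
  console.log(apiResponse.labels);
}
deleteLabelDataset();
```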
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteLabelDataset.js,samples/README.md) + +__Usage:__ + + +`node deleteLabelDataset.js ` + + +----- + + + + +### Delete Label Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteLabelTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteLabelTable.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteLabelTable.js` + + +----- + + + + +### Delete Model + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteModel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteModel.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteModel.js` + + +----- + + + + +### Delete Routine + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteRoutine.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteRoutine.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteRoutine.js` + + +----- + + + + +### Delete Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/deleteTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/deleteTable.js,samples/README.md) + +__Usage:__ + + +`node samples/deleteTable.js` + + +----- + + + + +### Extract Table Compressed + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/extractTableCompressed.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/extractTableCompressed.js,samples/README.md) + +__Usage:__ + + +`node samples/extractTableCompressed.js` + + +----- + + + + +### Extract Table JSON + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/extractTableJSON.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/extractTableJSON.js,samples/README.md) + +__Usage:__ + + +`node samples/extractTableJSON.js` + + +----- + + + + +### Extract Table To GCS + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/extractTableToGCS.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/extractTableToGCS.js,samples/README.md) + +__Usage:__ + + +`node samples/extractTableToGCS.js` + + +----- + + + + +### Get Dataset + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getDataset.js). 
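
The call itself is a one-liner; here is a minimal sketch with a placeholder dataset ID:

```
// Sketch: fetch a dataset and print a couple of metadata fields.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function getDataset() {
  const [dataset] = await bigquery.dataset('my_dataset').get();
  console.log(`Dataset: ${dataset.id}`);
  console.log(`Location: ${dataset.metadata.location}`);
}
getDataset();
```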
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getDataset.js,samples/README.md) + +__Usage:__ + + +`node samples/getDataset.js` + + +----- + + + + +### Get Dataset Labels + +Gets labels on a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getDatasetLabels.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getDatasetLabels.js,samples/README.md) + +__Usage:__ + + +`node getDatasetLabels.js ` + + +----- + + + + +### Get Job + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getJob.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getJob.js,samples/README.md) + +__Usage:__ + + +`node samples/getJob.js` + + +----- + + + + +### BigQuery Get Model + +Retrieves an existing model from a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getModel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getModel.js,samples/README.md) + +__Usage:__ + + +`node getModel.js ` + + +----- + + + + +### Get Routine + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getRoutine.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getRoutine.js,samples/README.md) + +__Usage:__ + + +`node samples/getRoutine.js` + + +----- + + + + +### BigQuery Get Table + +Retrieves an existing table from a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getTable.js,samples/README.md) + +__Usage:__ + + +`node getTable.js ` + + +----- + + + + +### Get Table Labels + +Gets labels on a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getTableLabels.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getTableLabels.js,samples/README.md) + +__Usage:__ + + +`node getTableLabels.js ` + + +----- + + + + +### Get View + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/getView.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/getView.js,samples/README.md) + +__Usage:__ + + +`node samples/getView.js` + + +----- + + + + +### Insert Rows As Stream + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/insertRowsAsStream.js). 
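
A minimal sketch of streaming inserts with `Table#insert`; the table and its `name`/`age` schema are placeholders:

```
// Sketch: stream a few rows into an existing table (placeholder IDs/schema).
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function insertRowsAsStream() {
  const rows = [
    {name: 'Tom', age: 30},
    {name: 'Jane', age: 32},
  ];
  // Uses the streaming insert (tabledata.insertAll) path, not a load job.
  await bigquery.dataset('my_dataset').table('my_table').insert(rows);
  console.log(`Inserted ${rows.length} rows`);
}
insertRowsAsStream();
```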
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/insertRowsAsStream.js,samples/README.md) + +__Usage:__ + + +`node samples/insertRowsAsStream.js` + + +----- + + + + +### Inserting Data Types + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/insertingDataTypes.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/insertingDataTypes.js,samples/README.md) + +__Usage:__ + + +`node samples/insertingDataTypes.js` + + +----- + + + + +### BigQuery Label Dataset + +Updates a label on a dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/labelDataset.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/labelDataset.js,samples/README.md) + +__Usage:__ + + +`node labelDataset.js ` + + +----- + + + + +### Label Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/labelTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/labelTable.js,samples/README.md) + +__Usage:__ + + +`node samples/labelTable.js` + + +----- + + + + +### List Datasets + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listDatasets.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listDatasets.js,samples/README.md) + +__Usage:__ + + +`node samples/listDatasets.js` + + +----- + + + + +### List Datasets By Label + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listDatasetsByLabel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listDatasetsByLabel.js,samples/README.md) + +__Usage:__ + + +`node samples/listDatasetsByLabel.js` + + +----- + + + + +### List Jobs + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listJobs.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listJobs.js,samples/README.md) + +__Usage:__ + + +`node samples/listJobs.js` + + +----- + + + + +### BigQuery List Models + +Lists all existing models in the dataset. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listModels.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listModels.js,samples/README.md) + +__Usage:__ + + +`node listModels.js ` + + +----- + + + + +### BigQuery List Models Streaming + +Lists all existing models in the dataset using streaming method. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listModelsStreaming.js). 
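
As a rough sketch, and assuming the client exposes a `Dataset#getModelsStream` object stream (as this sample's name suggests), event-based listing looks something like:

```
// Sketch: list models via a readable object stream (placeholder dataset ID).
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

bigquery
  .dataset('my_dataset')
  .getModelsStream()
  .on('error', console.error)
  .on('data', model => console.log(model.id))
  .on('end', () => console.log('All models listed.'));
```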
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listModelsStreaming.js,samples/README.md) + +__Usage:__ + + +`node listModelsStreaming.js ` + + +----- + + + + +### List Routines + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listRoutines.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listRoutines.js,samples/README.md) + +__Usage:__ + + +`node samples/listRoutines.js` + + +----- + + + + +### List Tables + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/listTables.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/listTables.js,samples/README.md) + +__Usage:__ + + +`node samples/listTables.js` + + +----- + + + + +### Load CSV From GCS + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadCSVFromGCS.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadCSVFromGCS.js,samples/README.md) + +__Usage:__ + + +`node samples/loadCSVFromGCS.js` + + +----- + + + + +### Load CSV From GCS Autodetect + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadCSVFromGCSAutodetect.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadCSVFromGCSAutodetect.js,samples/README.md) + +__Usage:__ + + +`node samples/loadCSVFromGCSAutodetect.js` + + +----- + + + + +### Load CSV From GCS Truncate + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadCSVFromGCSTruncate.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadCSVFromGCSTruncate.js,samples/README.md) + +__Usage:__ + + +`node samples/loadCSVFromGCSTruncate.js` + + +----- + + + + +### Load JSON From GCS + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadJSONFromGCS.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadJSONFromGCS.js,samples/README.md) + +__Usage:__ + + +`node samples/loadJSONFromGCS.js` + + +----- + + + + +### Load JSON From GCS Autodetect + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadJSONFromGCSAutodetect.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadJSONFromGCSAutodetect.js,samples/README.md) + +__Usage:__ + + +`node samples/loadJSONFromGCSAutodetect.js` + + +----- + + + + +### Load JSON From GCS Truncate + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadJSONFromGCSTruncate.js). 
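
In outline, the sample runs a load job with `WRITE_TRUNCATE` so the destination table is replaced. A hedged sketch; the bucket, object, and table names are placeholders, and `@google-cloud/storage` is assumed to be installed:

```
// Sketch: overwrite a table with newline-delimited JSON from GCS (placeholders).
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');
const bigquery = new BigQuery();
const storage = new Storage();

async function loadJSONFromGCSTruncate() {
  const metadata = {
    sourceFormat: 'NEWLINE_DELIMITED_JSON',
    // WRITE_TRUNCATE replaces whatever the table currently holds.
    writeDisposition: 'WRITE_TRUNCATE',
  };
  const [job] = await bigquery
    .dataset('my_dataset')
    .table('my_table')
    .load(storage.bucket('my-bucket').file('path/to/data.json'), metadata);
  console.log(`Job ${job.id} completed.`);
}
loadJSONFromGCSTruncate();
```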
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadJSONFromGCSTruncate.js,samples/README.md) + +__Usage:__ + + +`node samples/loadJSONFromGCSTruncate.js` + + +----- + + + + +### Load Local File + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadLocalFile.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadLocalFile.js,samples/README.md) + +__Usage:__ + + +`node samples/loadLocalFile.js` + + +----- + + + + +### Load Orc From GCS Truncate + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadOrcFromGCSTruncate.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadOrcFromGCSTruncate.js,samples/README.md) + +__Usage:__ + + +`node samples/loadOrcFromGCSTruncate.js` + + +----- + + + + +### Load Parquet From GCS Truncate + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadParquetFromGCSTruncate.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadParquetFromGCSTruncate.js,samples/README.md) + +__Usage:__ + + +`node samples/loadParquetFromGCSTruncate.js` + + +----- + + + + +### Load Table GCS Avro + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSAvro.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSAvro.js,samples/README.md) + +__Usage:__ + + +`node samples/loadTableGCSAvro.js` + + +----- + + + + +### Load Table GCS Avro Truncate + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSAvroTruncate.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSAvroTruncate.js,samples/README.md) + +__Usage:__ + + +`node samples/loadTableGCSAvroTruncate.js` + + +----- + + + + +### Load Table GCSORC + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSORC.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSORC.js,samples/README.md) + +__Usage:__ + + +`node samples/loadTableGCSORC.js` + + +----- + + + + +### Load Table GCS Parquet + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTableGCSParquet.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTableGCSParquet.js,samples/README.md) + +__Usage:__ + + +`node samples/loadTableGCSParquet.js` + + +----- + + + + +### Load Table Partitioned + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/loadTablePartitioned.js). 
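
The interesting part of this sample is the `timePartitioning` block in the load job metadata. A sketch under placeholder names (bucket, file, and schema are assumptions):

```
// Sketch: load CSV from GCS into a day-partitioned table (placeholders).
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');
const bigquery = new BigQuery();
const storage = new Storage();

async function loadTablePartitioned() {
  const metadata = {
    sourceFormat: 'CSV',
    skipLeadingRows: 1,
    schema: {
      fields: [
        {name: 'name', type: 'STRING'},
        {name: 'post_abbr', type: 'STRING'},
        {name: 'date', type: 'DATE'},
      ],
    },
    timePartitioning: {
      type: 'DAY',
      field: 'date', // partition by the DATE column
    },
  };
  const [job] = await bigquery
    .dataset('my_dataset')
    .table('my_partitioned_table')
    .load(storage.bucket('my-bucket').file('path/to/data.csv'), metadata);
  console.log(`Job ${job.id} completed.`);
}
loadTablePartitioned();
```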
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/loadTablePartitioned.js,samples/README.md) + +__Usage:__ + + +`node samples/loadTablePartitioned.js` + + +----- + + + + +### Nested Repeated Schema + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/nestedRepeatedSchema.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/nestedRepeatedSchema.js,samples/README.md) + +__Usage:__ + + +`node samples/nestedRepeatedSchema.js` + + +----- + + + + +### Query + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/query.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/query.js,samples/README.md) + +__Usage:__ + + +`node samples/query.js` + + +----- + + + + +### Query Batch + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryBatch.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryBatch.js,samples/README.md) + +__Usage:__ + + +`node samples/queryBatch.js` + + +----- + + + + +### Query Destination Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryDestinationTable.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryDestinationTable.js,samples/README.md) + +__Usage:__ + + +`node samples/queryDestinationTable.js` + + +----- + + + + +### Query Disable Cache + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryDisableCache.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryDisableCache.js,samples/README.md) + +__Usage:__ + + +`node samples/queryDisableCache.js` + + +----- + + + + +### Query Dry Run + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryDryRun.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryDryRun.js,samples/README.md) + +__Usage:__ + + +`node samples/queryDryRun.js` + + +----- + + + + +### Query External GCS Perm + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryExternalGCSPerm.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryExternalGCSPerm.js,samples/README.md) + +__Usage:__ + + +`node samples/queryExternalGCSPerm.js` + + +----- + + + + +### Query Legacy + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryLegacy.js). 
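
A minimal sketch: legacy SQL uses the `[project:dataset.table]` syntax and has to be requested explicitly with `useLegacySql: true`:

```
// Sketch: run a legacy SQL query against a public dataset.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function queryLegacy() {
  const options = {
    query: 'SELECT word FROM [bigquery-public-data:samples.shakespeare] LIMIT 10',
    // Standard SQL is the default; this flag switches the job to legacy SQL.
    useLegacySql: true,
  };
  const [rows] = await bigquery.query(options);
  rows.forEach(row => console.log(row));
}
queryLegacy();
```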
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryLegacy.js,samples/README.md) + +__Usage:__ + + +`node samples/queryLegacy.js` + + +----- + + + + +### Query Legacy Large Results + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryLegacyLargeResults.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryLegacyLargeResults.js,samples/README.md) + +__Usage:__ + + +`node samples/queryLegacyLargeResults.js` + + +----- + + + + +### Query Pagination + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryPagination.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryPagination.js,samples/README.md) + +__Usage:__ + + +`node samples/queryPagination.js` + + +----- + + + + +### Query Params Arrays + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsArrays.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsArrays.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsArrays.js` + + +----- + + + + +### Query Params Named + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsNamed.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsNamed.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsNamed.js` + + +----- + + + + +### Query Params Named Types + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsNamedTypes.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsNamedTypes.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsNamedTypes.js` + + +----- + + + + +### Query Params Positional + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsPositional.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsPositional.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsPositional.js` + + +----- + + + + +### Query Params Positional Types + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsPositionalTypes.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsPositionalTypes.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsPositionalTypes.js` + + +----- + + + + +### Query Params Structs + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsStructs.js). 
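
For orientation, a struct is passed as a plain object under `params` and the client infers the STRUCT type. A minimal sketch:

```
// Sketch: bind a STRUCT value to a named query parameter.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function queryParamsStructs() {
  const options = {
    query: 'SELECT @struct_value AS struct_obj',
    params: {struct_value: {x: 1, y: 'foo'}},
    location: 'US',
  };
  const [rows] = await bigquery.query(options);
  rows.forEach(row => console.log(row.struct_obj));
}
queryParamsStructs();
```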
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsStructs.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsStructs.js` + + +----- + + + + +### Query Params Timestamps + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryParamsTimestamps.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryParamsTimestamps.js,samples/README.md) + +__Usage:__ + + +`node samples/queryParamsTimestamps.js` + + +----- + + + + +### Query Stack Overflow + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/queryStackOverflow.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryStackOverflow.js,samples/README.md) + +__Usage:__ + + +`node samples/queryStackOverflow.js` + + +----- + + + + +### Quickstart + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/quickstart.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) + +__Usage:__ + + +`node samples/quickstart.js` + + +----- + + + + +### Relax Column + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/relaxColumn.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/relaxColumn.js,samples/README.md) + +__Usage:__ + + +`node samples/relaxColumn.js` + + +----- + + + + +### Relax Column Load Append + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/relaxColumnLoadAppend.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/relaxColumnLoadAppend.js,samples/README.md) + +__Usage:__ + + +`node samples/relaxColumnLoadAppend.js` + + +----- + + + + +### Relax Column Query Append + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/relaxColumnQueryAppend.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/relaxColumnQueryAppend.js,samples/README.md) + +__Usage:__ + + +`node samples/relaxColumnQueryAppend.js` + + +----- + + + + +### Set User Agent + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/setUserAgent.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/setUserAgent.js,samples/README.md) + +__Usage:__ + + +`node samples/setUserAgent.js` + + +----- + + + + +### Undelete Table + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/undeleteTable.js). 
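
Roughly, the sample copies a point-in-time snapshot of the deleted table (the `table@<epoch-ms>` decorator) into a new table while the deleted data is still recoverable. A hedged sketch with placeholder IDs:

```
// Sketch: restore a recently deleted table from a point-in-time snapshot.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function undeleteTable() {
  // Reference the table as it existed five minutes ago (epoch milliseconds).
  const snapshotEpoch = Date.now() - 5 * 60 * 1000;
  const [job] = await bigquery
    .dataset('my_dataset')
    .table(`my_table@${snapshotEpoch}`)
    .copy(bigquery.dataset('my_dataset').table('my_table_recovered'));
  console.log(`Job ${job.id} completed.`);
}
undeleteTable();
```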
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/undeleteTable.js,samples/README.md) + +__Usage:__ + + +`node samples/undeleteTable.js` + + +----- + + + + +### Update Dataset Access + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateDatasetAccess.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateDatasetAccess.js,samples/README.md) + +__Usage:__ + + +`node samples/updateDatasetAccess.js` + + +----- + + + + +### Update Dataset Description + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateDatasetDescription.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateDatasetDescription.js,samples/README.md) + +__Usage:__ + + +`node samples/updateDatasetDescription.js` + + +----- + + + + +### Update Dataset Expiration + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateDatasetExpiration.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateDatasetExpiration.js,samples/README.md) + +__Usage:__ + + +`node samples/updateDatasetExpiration.js` + + +----- + + + + +### BigQuery Update Model + +Updates a model's metadata. + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateModel.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateModel.js,samples/README.md) + +__Usage:__ + + +`node updateModel.js ` + + +----- + + + + +### Update Routine + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateRoutine.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateRoutine.js,samples/README.md) + +__Usage:__ + + +`node samples/updateRoutine.js` + + +----- + + + + +### Update Table Description + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateTableDescription.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateTableDescription.js,samples/README.md) + +__Usage:__ + + +`node samples/updateTableDescription.js` + + +----- + + + + +### Update Table Expiration + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateTableExpiration.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateTableExpiration.js,samples/README.md) + +__Usage:__ + + +`node samples/updateTableExpiration.js` + + +----- + + + + +### Update View Query + +View the [source code](https://github.com/googleapis/nodejs-bigquery/blob/master/samples/updateViewQuery.js). 
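
In outline, the sample patches the view's `view` metadata with a new query. A sketch with placeholder IDs, assuming the view takes a standard-SQL query:

```
// Sketch: point an existing view at a new query (placeholder IDs).
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function updateViewQuery() {
  const view = bigquery.dataset('my_dataset').table('my_view');
  const newQuery = `SELECT name
    FROM \`bigquery-public-data.usa_names.usa_1910_2013\`
    WHERE state = 'TX'
    LIMIT 10`;
  const [metadata] = await view.setMetadata({
    view: {query: newQuery, useLegacySql: false},
  });
  console.log(metadata.view.query);
}
updateViewQuery();
```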
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/updateViewQuery.js,samples/README.md) + +__Usage:__ + + +`node samples/updateViewQuery.js` + + + + + [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png [shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/README.md +[product-docs]: https://cloud.google.com/bigquery diff --git a/samples/addColumnLoadAppend.js b/samples/addColumnLoadAppend.js new file mode 100644 index 00000000..fedcc8e5 --- /dev/null +++ b/samples/addColumnLoadAppend.js @@ -0,0 +1,78 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', + tableId = 'my_table', + fileName = '/path/to/file.csv' +) { + // [START bigquery_add_column_load_append] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + + // Instantiate client + const bigquery = new BigQuery(); + + async function addColumnLoadAppend() { + // Adds a new column to a BigQuery table while appending rows via a load job. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const fileName = '/path/to/file.csv'; + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // In this example, the existing table contains only the 'Name', 'Age', + // & 'Weight' columns. 'REQUIRED' fields cannot be added to an existing + // schema, so the additional column must be 'NULLABLE'. + const schema = 'Name:STRING, Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN'; + + // Retrieve destination table reference + const [table] = await bigquery + .dataset(datasetId) + .table(tableId) + .get(); + const destinationTableRef = table.metadata.tableReference; + + // Set load job options + const options = { + schema: schema, + schemaUpdateOptions: ['ALLOW_FIELD_ADDITION'], + writeDisposition: 'WRITE_APPEND', + destinationTable: destinationTableRef, + }; + + // Load data from a local file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(fileName, options); + + console.log(`Job ${job.id} completed.`); + console.log('New Schema:'); + console.log(job.configuration.load.schema.fields); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_add_column_load_append] + addColumnLoadAppend(); +} +main(...process.argv.slice(2)); diff --git a/samples/addColumnQueryAppend.js b/samples/addColumnQueryAppend.js new file mode 100644 index 00000000..e99607b3 --- /dev/null +++ b/samples/addColumnQueryAppend.js @@ -0,0 +1,73 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_add_column_query_append] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + + // Instantiate client + const bigquery = new BigQuery(); + + async function addColumnQueryAppend() { + // Adds a new column to a BigQuery table while appending rows via a query job. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // Retrieve destination table reference + const [table] = await bigquery + .dataset(datasetId) + .table(tableId) + .get(); + const destinationTableRef = table.metadata.tableReference; + + // In this example, the existing table contains only the 'name' column. + // 'REQUIRED' fields cannot be added to an existing schema, + // so the additional column must be 'NULLABLE'. + const query = `SELECT name, year + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE state = 'TX' + LIMIT 10`; + + // Set load job options + const options = { + query: query, + schemaUpdateOptions: ['ALLOW_FIELD_ADDITION'], + writeDisposition: 'WRITE_APPEND', + destinationTable: destinationTableRef, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + }; + + const [job] = await bigquery.createQueryJob(options); + console.log(`Job ${job.id} started.`); + + // Wait for the query to finish + const [rows] = await job.getQueryResults(); + console.log(`Job ${job.id} completed.`); + + // Print the results + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_add_column_query_append] + addColumnQueryAppend(); +} +main(...process.argv.slice(2)); diff --git a/samples/addEmptyColumn.js b/samples/addEmptyColumn.js new file mode 100644 index 00000000..e81d5071 --- /dev/null +++ b/samples/addEmptyColumn.js @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_add_empty_column] + + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function addEmptyColumn() { + // Adds an empty column to the schema. + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + const column = {name: 'size', type: 'STRING'}; + + // Retrieve current table metadata + const table = bigquery.dataset(datasetId).table(tableId); + const [metadata] = await table.getMetadata(); + + // Update table schema + const schema = metadata.schema; + const new_schema = schema; + new_schema.fields.push(column); + metadata.schema = new_schema; + + const [result] = await table.setMetadata(metadata); + console.log(result.schema.fields); + } + // [END bigquery_add_empty_column] + addEmptyColumn(); +} + +main(...process.argv.slice(2)); diff --git a/samples/auth-user-sample/authUserFlow.js b/samples/auth-user-sample/authUserFlow.js new file mode 100644 index 00000000..c45be49f --- /dev/null +++ b/samples/auth-user-sample/authUserFlow.js @@ -0,0 +1,151 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// [START bigquery_auth_user_flow] +const {OAuth2Client} = require('google-auth-library'); +const readline = require('readline-promise').default; + +function startRl() { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); + + return rl; +} + +/** + * Download your OAuth2 configuration from the Google + * Developers Console API Credentials page. + * https://console.cloud.google.com/apis/credentials + */ +const keys = require('./oauth2.keys.json'); + +/** + * Create a new OAuth2Client, and go through the OAuth2 content + * workflow. Return the full client to the callback. + */ +async function getRedirectUrl() { + const rl = main.startRl(); + // Create an oAuth client to authorize the API call. Secrets are kept in a `keys.json` file, + // which should be downloaded from the Google Developers Console. + const oAuth2Client = new OAuth2Client( + keys.installed.client_id, + keys.installed.client_secret, + keys.installed.redirect_uris[0] + ); + + // Generate the url that will be used for the consent dialog. + const authorizeUrl = oAuth2Client.generateAuthUrl({ + access_type: 'offline', + scope: 'https://www.googleapis.com/auth/bigquery', + prompt: 'consent', + }); + + console.info( + `Please visit this URL to authorize this application: ${authorizeUrl}` + ); + + const code = await rl.questionAsync('Enter the authorization code: '); + const tokens = await main.exchangeCode(code); + rl.close(); + + return tokens; +} + +// Exchange an authorization code for an access token +async function exchangeCode(code) { + const oAuth2Client = new OAuth2Client( + keys.installed.client_id, + keys.installed.client_secret, + keys.installed.redirect_uris[0] + ); + + const r = await oAuth2Client.getToken(code); + console.info(r.tokens); + return r.tokens; +} + +async function authFlow(projectId = 'project_id') { + /** + * TODO(developer): + * Save Project ID as environment variable PROJECT_ID="project_id" + * Uncomment the following line before running the sample. 
+ */ + // projectId = process.env.PROJECT_ID; + + const tokens = await main.getRedirectUrl(); + + const credentials = { + type: 'authorized_user', + client_id: keys.installed.client_id, + client_secret: keys.installed.client_secret, + refresh_token: tokens.refresh_token, + }; + + return { + projectId, + credentials, + }; +} +// [END bigquery_auth_user_flow] +// [START bigquery_auth_user_query] +async function query() { + const {BigQuery} = require('@google-cloud/bigquery'); + + const credentials = await main.authFlow(); + const bigquery = new BigQuery(credentials); + + // Queries the U.S. given names dataset for the state of Texas. + const query = `SELECT name, SUM(number) as total + FROM \`bigquery-public-data.usa_names.usa_1910_current\` + WHERE name = 'William' + GROUP BY name;`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + const options = { + query: query, + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + console.log(`Job ${job.id} started.`); + + // Wait for the query to finish + const [rows] = await job.getQueryResults(); + + // Print the results + console.log('Rows:'); + rows.forEach(row => console.log(row)); + + return rows; +} + +const main = { + query, + authFlow, + exchangeCode, + getRedirectUrl, + startRl, +}; +module.exports = { + main, +}; + +if (module === require.main) { + query().catch(console.error); +} +// [END bigquery_auth_user_query] diff --git a/samples/auth-user-sample/oauth2.keys.json b/samples/auth-user-sample/oauth2.keys.json new file mode 100644 index 00000000..1900f2c7 --- /dev/null +++ b/samples/auth-user-sample/oauth2.keys.json @@ -0,0 +1 @@ +{"installed":{"client_id":"my_client_id","project_id":"my_project","auth_uri":"my_uri","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"my_url","client_secret":"my_secret","redirect_uris":["http://localhost","http://localhost:3000/oauth2callback"]}} diff --git a/samples/auth-user-sample/test/auth.test.js b/samples/auth-user-sample/test/auth.test.js new file mode 100644 index 00000000..35fce78a --- /dev/null +++ b/samples/auth-user-sample/test/auth.test.js @@ -0,0 +1,82 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const sinon = require('sinon'); +const {assert} = require('chai'); +const proxyquire = require('proxyquire'); +const {describe, it, before, afterEach} = require('mocha'); + +describe('authUserFlow()', () => { + let readlineMock, generateUrlStub, questionStub, tokenStub, authUserFlow; + + before(async () => { + questionStub = sinon.stub(); + + readlineMock = { + questionAsync: questionStub, + close: sinon.stub(), + }; + + generateUrlStub = sinon.stub().returns('https://example.com'); + tokenStub = sinon.stub().returns({tokens: 'tokens'}); + + authUserFlow = proxyquire('../authUserFlow.js', { + 'google-auth-library': { + OAuth2Client: sinon.stub().callsFake(() => { + return { + generateAuthUrl: generateUrlStub, + getToken: tokenStub, + }; + }), + }, + }); + }); + + afterEach(() => { + sinon.restore(); + }); + + it('should exchange code for tokens', async () => { + const output = await authUserFlow.main.exchangeCode('abc123'); + assert.strictEqual(output, 'tokens'); + sinon.assert.calledWith(tokenStub, 'abc123'); + }); + + it('should return project id and credentials', async () => { + sinon + .stub(authUserFlow.main, 'getRedirectUrl') + .returns({refresh_token: 'token'}); + const output = await authUserFlow.main.authFlow('my_project'); + assert.strictEqual(output.projectId, 'my_project'); + }); + + it('should get redirect url', async () => { + const startStub = sinon + .stub(authUserFlow.main, 'startRl') + .returns(readlineMock); + await authUserFlow.main.getRedirectUrl(); + sinon.assert.called(startStub); + sinon.assert.called(questionStub); + sinon.assert.called(generateUrlStub); + }); + + it('should run a query', async () => { + const authFlowStub = sinon.stub(authUserFlow.main, 'authFlow').returns({}); + const output = await authUserFlow.main.query(); + assert.strictEqual(output[0].name, 'William'); + sinon.assert.called(authFlowStub); + }); +}); diff --git a/samples/authViewTutorial.js b/samples/authViewTutorial.js new file mode 100644 index 00000000..ed8a416c --- /dev/null +++ b/samples/authViewTutorial.js @@ -0,0 +1,143 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + projectId = 'my_project_id', + sourceDatasetId = 'shared_views', + sourceTableId = 'my_source_table', + sharedDatasetId = 'shared_views', + sharedViewId = 'github_analyst_view' +) { + // [START bigquery_authorized_view_tutorial] + async function authorizedViewTutorial() { + // [START bigquery_avt_create_source_dataset] + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = "my_project_id"; + // const sourceDatasetId = "my_source_dataset"; + // const sourceTableId = "my_source_table"; + // const sharedDatasetId = "shared_views"; + // const sharedViewId = "my_view"; + + // Make API request to create dataset + const [sourceDataset] = await bigquery.createDataset(sourceDatasetId); + console.log(`Source dataset ${sourceDataset.id} created.`); + + const destinationTable = sourceDataset.table(sourceTableId); + + const query = `SELECT commit, author, committer, repo_name + FROM \`bigquery-public-data.github_repos.commits\` + LIMIT 1000`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + query: query, + destination: destinationTable, + }; + + // Make API request to populate a source table + await bigquery.query(options); + // [END bigquery_avt_create_source_table] + // [START bigquery_avt_create_shared_dataset] + // Create a separate dataset to store your view + + // Make API request to create a new dataset + const [sharedDataset] = await bigquery.createDataset(sharedDatasetId); + + console.log(`Dataset ${sharedDataset.id} created.`); + // [END bigquery_avt_create_shared_dataset] + // [START bigquery_avt_create_view] + // Create the view in the new dataset + + const viewQuery = `SELECT + commit, author.name as author, + committer.name as committer, repo_name + FROM + \`${projectId}.${sourceDatasetId}.${sourceTableId}\``; + + const viewOptions = { + view: {query: viewQuery, useLegacySql: false}, + }; + + // Make API request to create the view + const [view] = await sharedDataset.createTable(sharedViewId, viewOptions); + + const viewId = view.metadata.id; + console.log(`View ${viewId} created.`); + // [END bigquery_avt_create_view] + // [START bigquery_avt_shared_dataset_access] + // Assign access controls to the dataset containing the view + + // Note to user: This is a group email for testing purposes. Replace with + // your own group email address when running this code. 
+ const analyst_group_email = 'example-analyst-group@google.com'; + + const analystAccessEntry = { + role: 'READER', + groupByEmail: analyst_group_email, + }; + + // Make API request to retrieve dataset metadata + const [sharedMetadata] = await sharedDataset.getMetadata(); + + const sharedAccessEntries = sharedMetadata.access; + sharedAccessEntries.push(analystAccessEntry); + + sharedMetadata.access = sharedAccessEntries; + + // Make API request to update dataset metadata + const [updatedSharedMetadata] = await sharedDataset.setMetadata( + sharedMetadata + ); + + console.log(`Dataset ${updatedSharedMetadata.id} updated.`); + // [END bigquery_avt_shared_dataset_access] + // [START bigquery_avt_source_dataset_access] + // Authorize the view to access the source dataset + + const viewReference = { + projectId: projectId, + datasetId: sharedDatasetId, + tableId: sharedViewId, + }; + + const datasetAccessEntry = {view: viewReference}; + + // Make API request to retrieve source dataset metadata + const [sourceMetadata] = await sourceDataset.getMetadata(); + + const sourceAccessEntries = sourceMetadata.access; + sourceAccessEntries.push(datasetAccessEntry); + + sourceMetadata.access = sourceAccessEntries; + + // Make API request to update source dataset metadata + const [updatedSourceMetadata] = await sourceDataset.setMetadata( + sourceMetadata + ); + + console.log(`Dataset ${updatedSourceMetadata.id} updated.`); + // [END bigquery_avt_source_dataset_access] + } + // [END bigquery_authorized_view_tutorial] + authorizedViewTutorial(); +} + +main(...process.argv.slice(2)); diff --git a/samples/browseTable.js b/samples/browseTable.js new file mode 100644 index 00000000..4fe94da9 --- /dev/null +++ b/samples/browseTable.js @@ -0,0 +1,86 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + tableId = 'my_table' // Table to create +) { + // [START bigquery_browse_table] + // Import the Google Cloud client library using default credentials + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function browseTable() { + // Retrieve a table's rows using manual pagination. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; // Existing dataset + // const tableId = 'my_table'; // Table to create + + const query = `SELECT name, SUM(number) as total_people + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + GROUP BY name + ORDER BY total_people + DESC LIMIT 100`; + + // Create table reference. 
+ const dataset = bigquery.dataset(datasetId); + const destinationTable = dataset.table(tableId); + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery + const queryOptions = { + query: query, + destination: destinationTable, + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(queryOptions); + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults + const queryResultsOptions = { + // Retrieve zero resulting rows. + maxResults: 0, + }; + + // Wait for the job to finish. + await job.getQueryResults(queryResultsOptions); + + function manualPaginationCallback(err, rows, nextQuery) { + rows.forEach(row => { + console.log(`name: ${row.name}, ${row.total_people} total people`); + }); + + if (nextQuery) { + // More results exist. + destinationTable.getRows(nextQuery, manualPaginationCallback); + } + } + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list + const getRowsOptions = { + autoPaginate: false, + maxResults: 20, + }; + + // Retrieve all rows. + destinationTable.getRows(getRowsOptions, manualPaginationCallback); + } + browseTable(); + // [END bigquery_browse_table] +} +main(...process.argv.slice(2)); diff --git a/samples/cancelJob.js b/samples/cancelJob.js new file mode 100644 index 00000000..85d85f7a --- /dev/null +++ b/samples/cancelJob.js @@ -0,0 +1,42 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(jobId = 'existing-job-id') { + // [START bigquery_cancel_job] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function cancelJob() { + // Attempts to cancel a job. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const jobId = "existing-job-id"; + + // Create a job reference + const job = bigquery.job(jobId); + + // Attempt to cancel job + const [apiResult] = await job.cancel(); + + console.log(apiResult.job.status); + } + // [END bigquery_cancel_job] + cancelJob(); +} +main(...process.argv.slice(2)); diff --git a/samples/clientJSONCredentials.js b/samples/clientJSONCredentials.js new file mode 100644 index 00000000..0c3e5fb4 --- /dev/null +++ b/samples/clientJSONCredentials.js @@ -0,0 +1,59 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main() { + // [START bigquery_client_json_credentials] + // Create a BigQuery client explicitly using service account credentials. + // by specifying the private key file. + const {BigQuery} = require('@google-cloud/bigquery'); + + const options = { + keyFilename: 'path/to/service_account.json', + projectId: 'my_project', + }; + + const bigquery = new BigQuery(options); + // [END bigquery_client_json_credentials] + async function query() { + // Queries the U.S. given names dataset for the state of Texas. + + const query = `SELECT name + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE state = 'TX' + LIMIT 100`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + const options = { + query: query, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + console.log(`Job ${job.id} started.`); + + // Wait for the query to finish + const [rows] = await job.getQueryResults(); + + // Print the results + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + query(); +} + +main(...process.argv.slice(2)); diff --git a/samples/copyTable.js b/samples/copyTable.js new file mode 100644 index 00000000..76045270 --- /dev/null +++ b/samples/copyTable.js @@ -0,0 +1,56 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + srcDatasetId = 'my_src_dataset', + srcTableId = 'my_src_table', + destDatasetId = 'my_dest_dataset', + destTableId = 'my_dest_table' +) { + // [START bigquery_copy_table] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function copyTable() { + // Copies src_dataset:src_table to dest_dataset:dest_table. + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const srcDatasetId = "my_src_dataset"; + // const srcTableId = "my_src_table"; + // const destDatasetId = "my_dest_dataset"; + // const destTableId = "my_dest_table"; + + // Copy the table contents into another table + const [job] = await bigquery + .dataset(srcDatasetId) + .table(srcTableId) + .copy(bigquery.dataset(destDatasetId).table(destTableId)); + + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_copy_table] + copyTable(); +} +main(...process.argv.slice(2)); diff --git a/samples/copyTableMultipleSource.js b/samples/copyTableMultipleSource.js new file mode 100644 index 00000000..4dc72ca9 --- /dev/null +++ b/samples/copyTableMultipleSource.js @@ -0,0 +1,57 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + sourceTable = 'my_table', // Existing table to copy from + destinationTable = 'testing' // Existing table to copy to +) { + // [START bigquery_copy_table_multiple_source] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function copyTableMultipleSource() { + // Copy multiple source tables to a given destination. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // sourceTable = 'my_table'; + // destinationTable = 'testing'; + + // Create a client + const dataset = bigquery.dataset(datasetId); + + const metadata = { + createDisposition: 'CREATE_NEVER', + writeDisposition: 'WRITE_TRUNCATE', + }; + + // Create table references + const table = dataset.table(sourceTable); + const yourTable = dataset.table(destinationTable); + + // Copy table + const [apiResponse] = await table.copy(yourTable, metadata); + console.log(apiResponse.configuration.copy); + } + // [END bigquery_copy_table_multiple_source] + copyTableMultipleSource(); +} + +main(...process.argv.slice(2)); diff --git a/samples/createDataset.js b/samples/createDataset.js index 110b0992..6cc4996e 100644 --- a/samples/createDataset.js +++ b/samples/createDataset.js @@ -1,31 +1,44 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ 'use strict'; -async function createDataset(DATASET_ID = 'YOUR_DATASET_ID') { +function main(datasetId = 'my_new_dataset') { // [START bigquery_create_dataset] - // Imports the Google Cloud client library + // Import the Google Cloud client library and create a client const {BigQuery} = require('@google-cloud/bigquery'); - - // Creates a client const bigquery = new BigQuery(); - // Creates a new dataset - const [dataset] = await bigquery.createDataset(DATASET_ID); - console.log(`Dataset ${dataset.id} created.`); + async function createDataset() { + // Creates a new dataset named "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_new_dataset"; + + // Specify the geographic location where the dataset should reside + const options = { + location: 'US', + }; + + // Create a new dataset + const [dataset] = await bigquery.createDataset(datasetId, options); + console.log(`Dataset ${dataset.id} created.`); + } + createDataset(); // [END bigquery_create_dataset] } -createDataset(...process.argv.slice(2)).catch(console.error); +main(...process.argv.slice(2)); diff --git a/samples/createJob.js b/samples/createJob.js new file mode 100644 index 00000000..90fe3877 --- /dev/null +++ b/samples/createJob.js @@ -0,0 +1,54 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_create_job] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createJob() { + // Run a BigQuery query job. + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/Job + const options = { + // Specify a job configuration to set optional job resource properties. + configuration: { + query: { + query: `SELECT country_name + FROM \`bigquery-public-data.utility_us.country_code_iso\` + LIMIT 10`, + useLegacySql: false, + }, + labels: {'example-label': 'example-value'}, + }, + }; + + // Make API request. + const response = await bigquery.createJob(options); + const job = response[0]; + + // Wait for the query to finish + const [rows] = await job.getQueryResults(job); + + // Print the results + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_create_job] + createJob(); +} +main(...process.argv.slice(2)); diff --git a/samples/createModel.js b/samples/createModel.js new file mode 100644 index 00000000..9ab20b2f --- /dev/null +++ b/samples/createModel.js @@ -0,0 +1,66 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Create Model +// description: Creates a model in a dataset. +// usage: node createModel.js + +function main(datasetId = 'my_dataset', modelId = 'my_model') { + // [START bigquery_create_model] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createModel() { + // Creates a model named "my_model" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const modelId = "my_model"; + + const query = `CREATE OR REPLACE MODEL \`${datasetId}.${modelId}\` + OPTIONS(model_type='logistic_reg') AS + SELECT + IF(totals.transactions IS NULL, 0, 1) AS label, + IFNULL(device.operatingSystem, "") AS os, + device.isMobile AS is_mobile, + IFNULL(geoNetwork.country, "") AS country, + IFNULL(totals.pageviews, 0) AS pageviews + FROM + \`bigquery-public-data.google_analytics_sample.ga_sessions_*\` + WHERE + _TABLE_SUFFIX BETWEEN '20160801' AND '20170631' + LIMIT 100000;`; + + const queryOptions = { + query: query, + }; + + // Run query to create a model + const [job] = await bigquery.createQueryJob(queryOptions); + + // Wait for the query to finish + await job.getQueryResults(); + + console.log(`Model ${modelId} created.`); + } + createModel(); + // [END bigquery_create_model] +} +main(...process.argv.slice(2)); diff --git a/samples/createRoutine.js b/samples/createRoutine.js new file mode 100644 index 00000000..246edbb3 --- /dev/null +++ b/samples/createRoutine.js @@ -0,0 +1,64 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + routineId = 'my_routine' // Routine to be created +) { + // [START bigquery_create_routine] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createRoutine() { + // Creates a new routine named "my_routine" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
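+     * Alternatively, pass values on the command line, e.g.: node createRoutine.js my_dataset my_routine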
+     */
+    // const datasetId = 'my_dataset';
+    // const routineId = 'my_routine';
+
+    const dataset = bigquery.dataset(datasetId);
+
+    // Create routine reference
+    let routine = dataset.routine(routineId);
+
+    const config = {
+      arguments: [
+        {
+          name: 'x',
+          dataType: {
+            typeKind: 'INT64',
+          },
+        },
+      ],
+      definitionBody: 'x * 3',
+      routineType: 'SCALAR_FUNCTION',
+      returnType: {
+        typeKind: 'INT64',
+      },
+    };
+
+    // Make API call
+    [routine] = await routine.create(config);
+
+    console.log(`Routine ${routineId} created.`);
+  }
+  createRoutine();
+  // [END bigquery_create_routine]
+}
+main(...process.argv.slice(2));
diff --git a/samples/createRoutineDDL.js b/samples/createRoutineDDL.js
new file mode 100644
index 00000000..3c69beec
--- /dev/null
+++ b/samples/createRoutineDDL.js
@@ -0,0 +1,59 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main(
+  projectId = 'my_project', // GCP project
+  datasetId = 'my_dataset', // Existing dataset
+  routineId = 'my_routine' // Routine to be created
+) {
+  // [START bigquery_create_routine_ddl]
+  // Import the Google Cloud client library and create a client
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const bigquery = new BigQuery();
+
+  async function createRoutineDDL() {
+    // Creates a routine using DDL.
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample.
+     */
+    // projectId = 'my_project';
+    // const datasetId = 'my_dataset';
+    // const routineId = 'my_routine';
+
+    const query = `CREATE FUNCTION \`${projectId}.${datasetId}.${routineId}\`(
+      arr ARRAY<STRUCT<name STRING, val INT64>>
+    ) AS (
+      (SELECT SUM(IF(elem.name = "foo",elem.val,null)) FROM UNNEST(arr) AS elem)
+    )`;
+
+    const options = {
+      query: query,
+    };
+
+    // Run the query as a job
+    const [job] = await bigquery.createQueryJob(options);
+    console.log(`Job ${job.id} started.`);
+
+    // Wait for the query to finish
+    await job.getQueryResults();
+
+    console.log(`Routine ${routineId} created.`);
+  }
+  createRoutineDDL();
+  // [END bigquery_create_routine_ddl]
+}
+main(...process.argv.slice(2));
diff --git a/samples/createTable.js b/samples/createTable.js
new file mode 100644
index 00000000..ac00382a
--- /dev/null
+++ b/samples/createTable.js
@@ -0,0 +1,58 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + tableId = 'my_new_table', // Table to be created + schema = [ + {name: 'Name', type: 'STRING', mode: 'REQUIRED'}, + {name: 'Age', type: 'INTEGER'}, + {name: 'Weight', type: 'FLOAT'}, + {name: 'IsMagic', type: 'BOOLEAN'}, + ] +) { + // [START bigquery_create_table] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createTable() { + // Creates a new table named "my_table" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + // const schema = 'Name:string, Age:integer, Weight:float, IsMagic:boolean'; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + schema: schema, + location: 'US', + }; + + // Create a new table in the dataset + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId, options); + + console.log(`Table ${table.id} created.`); + } + // [END bigquery_create_table] + createTable(); +} +main(...process.argv.slice(2)); diff --git a/samples/createTablePartitioned.js b/samples/createTablePartitioned.js new file mode 100644 index 00000000..7cfd03a5 --- /dev/null +++ b/samples/createTablePartitioned.js @@ -0,0 +1,54 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_create_table_partitioned] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createTablePartitioned() { + // Creates a new partitioned table named "my_table" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + const schema = 'Name:string, Post_Abbr:string, Date:date'; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + schema: schema, + location: 'US', + timePartitioning: { + type: 'DAY', + expirationMs: '7776000000', + field: 'date', + }, + }; + + // Create a new table in the dataset + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId, options); + console.log(`Table ${table.id} created with partitioning: `); + console.log(table.metadata.timePartitioning); + } + // [END bigquery_create_table_partitioned] + createTablePartitioned(datasetId, tableId); +} +main(...process.argv.slice(2)); diff --git a/samples/createTableRangePartitioned.js b/samples/createTableRangePartitioned.js new file mode 100644 index 00000000..6a70504b --- /dev/null +++ b/samples/createTableRangePartitioned.js @@ -0,0 +1,69 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_create_table_range_partitioned] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createTableRangePartitioned() { + // Creates a new integer range partitioned table named "my_table" + // in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + const schema = [ + {name: 'fullName', type: 'STRING'}, + {name: 'city', type: 'STRING'}, + {name: 'zipcode', type: 'INTEGER'}, + ]; + + // To use integer range partitioning, select a top-level REQUIRED or + // NULLABLE column with INTEGER / INT64 data type. Values that are + // outside of the range of the table will go into the UNPARTITIONED + // partition. Null values will be in the NULL partition. 
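+    // For example, with the configuration below (start 0, end 100000,
+    // interval 10), a zipcode of 94105 lands in the partition covering
+    // [94100, 94110), while a zipcode of 100000 or more is UNPARTITIONED.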
+ const rangePartition = { + field: 'zipcode', + range: { + start: 0, + end: 100000, + interval: 10, + }, + }; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + schema: schema, + rangePartitioning: rangePartition, + }; + + // Create a new table in the dataset + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId, options); + + console.log(`Table ${table.id} created with integer range partitioning: `); + console.log(table.metadata.rangePartitioning); + } + // [END bigquery_create_table_range_partitioned] + createTableRangePartitioned(datasetId, tableId); +} +main(...process.argv.slice(2)); diff --git a/samples/createView.js b/samples/createView.js new file mode 100644 index 00000000..546c3df9 --- /dev/null +++ b/samples/createView.js @@ -0,0 +1,57 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + myDatasetId = 'my_dataset', // Existing dataset + myTableId = 'my_new_view', // View to be created + projectId = 'bigquery-public-data', // Source GCP project ID + sourceDatasetId = 'usa_names', // Source dataset ID + sourceTableId = 'usa_1910_current' //Source table ID +) { + // [START bigquery_create_view] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function createView() { + // Creates a new view named "my_shared_view" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const myDatasetId = "my_table" + // const myTableId = "my_table" + // const projectId = "bigquery-public-data"; + // const sourceDatasetId = "usa_names" + // const sourceTableId = "usa_1910_current"; + const myDataset = await bigquery.dataset(myDatasetId); + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + view: `SELECT name + FROM \`${projectId}.${sourceDatasetId}.${sourceTableId}\` + LIMIT 10`, + }; + + // Create a new view in the dataset + const [view] = await myDataset.createTable(myTableId, options); + + console.log(`View ${view.id} created.`); + } + // [END bigquery_create_view] + createView(); +} +main(...process.argv.slice(2)); diff --git a/samples/ddlCreateView.js b/samples/ddlCreateView.js new file mode 100644 index 00000000..3cd25f69 --- /dev/null +++ b/samples/ddlCreateView.js @@ -0,0 +1,65 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + projectId = 'my_project', // GCP Project ID + datasetId = 'my_dataset', // Existing dataset ID + tableId = 'my_new_view' // View to be created +) { + // [START bigquery_ddl_create_view] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function ddlCreateView() { + // Creates a view via a DDL query + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = "my_project" + // const datasetId = "my_dataset" + // const tableId = "my_new_view" + + const query = ` + CREATE VIEW \`${projectId}.${datasetId}.${tableId}\` + OPTIONS( + expiration_timestamp=TIMESTAMP_ADD( + CURRENT_TIMESTAMP(), INTERVAL 48 HOUR), + friendly_name="new_view", + description="a view that expires in 2 days", + labels=[("org_unit", "development")] + ) + AS SELECT name, state, year, number + FROM \`bigquery-public-data.usa_names.usa_1910_current\` + WHERE state LIKE 'W%'`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + const options = { + query: query, + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + + job.on('complete', metadata => { + console.log(`Created new view ${tableId} via job ${metadata.id}`); + }); + } + // [END bigquery_ddl_create_view] + ddlCreateView(); +} +main(...process.argv.slice(2)); diff --git a/samples/deleteDataset.js b/samples/deleteDataset.js index 1d7bedf2..f1018c1d 100644 --- a/samples/deleteDataset.js +++ b/samples/deleteDataset.js @@ -1,35 +1,42 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 'use strict'; -async function deleteDataset(DATASET_ID = 'YOUR_DATASET_ID') { +function main(datasetId = 'my_dataset') { // [START bigquery_delete_dataset] - // Imports the Google Cloud client library + // Import the Google Cloud client library const {BigQuery} = require('@google-cloud/bigquery'); - - // Creates a client const bigquery = new BigQuery(); - // Creates a reference to the existing dataset - const dataset = bigquery.dataset(DATASET_ID); + async function deleteDataset() { + // Deletes a dataset named "my_dataset". 
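+    // Passing {force: true} to delete() below also removes any tables the
+    // dataset contains; without it, deleting a non-empty dataset fails.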
+ + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + + // Create a reference to the existing dataset + const dataset = bigquery.dataset(datasetId); - // Deletes the dataset - await dataset.delete(); - console.log(`Dataset ${dataset.id} deleted.`); + // Delete the dataset and its contents + await dataset.delete({force: true}); + console.log(`Dataset ${dataset.id} deleted.`); + } // [END bigquery_delete_dataset] + deleteDataset(); } -deleteDataset(...process.argv.slice(2)).catch(console.error); +main(...process.argv.slice(2)); diff --git a/samples/deleteLabelDataset.js b/samples/deleteLabelDataset.js new file mode 100644 index 00000000..bcf1434b --- /dev/null +++ b/samples/deleteLabelDataset.js @@ -0,0 +1,53 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Delete Label Dataset +// description: Deletes a label on a dataset. +// usage: node deleteLabelDataset.js + +function main( + datasetId = 'my_dataset' // Existing dataset +) { + // [START bigquery_delete_label_dataset] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function deleteLabelDataset() { + // Deletes a label on a dataset. + // This example dataset starts with existing label { color: 'green' } + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + + // Retrieve current dataset metadata. + const dataset = bigquery.dataset(datasetId); + const [metadata] = await dataset.getMetadata(); + + // Add label to dataset metadata + metadata.labels = {color: null}; + const [apiResponse] = await dataset.setMetadata(metadata); + + console.log(`${datasetId} labels:`); + console.log(apiResponse.labels); + } + // [END bigquery_delete_label_dataset] + deleteLabelDataset(); +} +main(...process.argv.slice(2)); diff --git a/samples/deleteLabelTable.js b/samples/deleteLabelTable.js new file mode 100644 index 00000000..44254703 --- /dev/null +++ b/samples/deleteLabelTable.js @@ -0,0 +1,49 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_delete_label_table] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function deleteLabelTable() { + // Deletes a label from an existing table. + // This example dataset starts with existing label { color: 'green' } + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + const dataset = bigquery.dataset(datasetId); + const [table] = await dataset.table(tableId).get(); + + // Retrieve current table metadata + const [metadata] = await table.getMetadata(); + + // Add label to table metadata + metadata.labels = {color: null}; + const [apiResponse] = await table.setMetadata(metadata); + + console.log(`${tableId} labels:`); + console.log(apiResponse.labels); + } + // [END bigquery_delete_label_table] + deleteLabelTable(); +} +main(...process.argv.slice(2)); diff --git a/samples/deleteModel.js b/samples/deleteModel.js new file mode 100644 index 00000000..e3e2c982 --- /dev/null +++ b/samples/deleteModel.js @@ -0,0 +1,41 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', modelId = 'my_model') { + // [START bigquery_delete_model] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function deleteModel() { + // Deletes a model named "my_model" from "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const modelId = "my_model"; + + const dataset = bigquery.dataset(datasetId); + const model = dataset.model(modelId); + await model.delete(); + + console.log(`Model ${modelId} deleted.`); + } + // [END bigquery_delete_model] + deleteModel(); +} +main(...process.argv.slice(2)); diff --git a/samples/deleteRoutine.js b/samples/deleteRoutine.js new file mode 100644 index 00000000..4921fe7b --- /dev/null +++ b/samples/deleteRoutine.js @@ -0,0 +1,48 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + routineId = 'my_routine' // Routine to be deleted +) { + // [START bigquery_delete_routine] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function deleteRoutine() { + // Deletes a routine named "my_routine" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const routineId = 'my_routine'; + + const dataset = bigquery.dataset(datasetId); + + // Create routine reference + let routine = dataset.routine(routineId); + + // Make API call + [routine] = await routine.delete(); + + console.log(`Routine ${routineId} deleted.`); + } + deleteRoutine(); + // [END bigquery_delete_routine] +} +main(...process.argv.slice(2)); diff --git a/samples/deleteTable.js b/samples/deleteTable.js new file mode 100644 index 00000000..474564bd --- /dev/null +++ b/samples/deleteTable.js @@ -0,0 +1,44 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_delete_table] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function deleteTable() { + // Deletes "my_table" from "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Delete the table + await bigquery + .dataset(datasetId) + .table(tableId) + .delete(); + + console.log(`Table ${tableId} deleted.`); + } + // [END bigquery_delete_table] + deleteTable(); +} + +main(...process.argv.slice(2)); diff --git a/samples/extractTableCompressed.js b/samples/extractTableCompressed.js new file mode 100644 index 00000000..280d4d1c --- /dev/null +++ b/samples/extractTableCompressed.js @@ -0,0 +1,65 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main( + datasetId = 'my_dataset', + tableId = 'my_table', + bucketName = 'my-bucket', + filename = 'file.csv' +) { + // [START bigquery_extract_table_compressed] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + const bigquery = new BigQuery(); + const storage = new Storage(); + + async function extractTableCompressed() { + // Exports my_dataset:my_table to gcs://my-bucket/my-file as a compressed file. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + // const bucketName = "my-bucket"; + // const filename = "file.csv"; + + // Location must match that of the source table. + const options = { + location: 'US', + gzip: true, + }; + + // Export data from the table into a Google Cloud Storage file + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .extract(storage.bucket(bucketName).file(filename), options); + + console.log(`Job ${job.id} created.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_extract_table_compressed] + extractTableCompressed(); +} +main(...process.argv.slice(2)); diff --git a/samples/extractTableJSON.js b/samples/extractTableJSON.js new file mode 100644 index 00000000..6a106566 --- /dev/null +++ b/samples/extractTableJSON.js @@ -0,0 +1,65 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', + tableId = 'my_table', + bucketName = 'my-bucket', + filename = 'file.json' +) { + // [START bigquery_extract_table_json] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + const bigquery = new BigQuery(); + const storage = new Storage(); + + async function extractTableJSON() { + // Exports my_dataset:my_table to gcs://my-bucket/my-file as JSON. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + // const bucketName = "my-bucket"; + // const filename = "file.json"; + + // Location must match that of the source table. 
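+    // format: 'json' exports newline-delimited JSON; 'csv' and 'avro' are
+    // other values accepted by extract().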
+ const options = { + format: 'json', + location: 'US', + }; + + // Export data from the table into a Google Cloud Storage file + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .extract(storage.bucket(bucketName).file(filename), options); + + console.log(`Job ${job.id} created.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_extract_table_json] + extractTableJSON(); +} +main(...process.argv.slice(2)); diff --git a/samples/extractTableToGCS.js b/samples/extractTableToGCS.js new file mode 100644 index 00000000..a27fb703 --- /dev/null +++ b/samples/extractTableToGCS.js @@ -0,0 +1,64 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', + tableId = 'my_table', + bucketName = 'my-bucket', + filename = 'file.csv' +) { + // [START bigquery_extract_table] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + const bigquery = new BigQuery(); + const storage = new Storage(); + + async function extractTableToGCS() { + // Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + // const bucketName = "my-bucket"; + // const filename = "file.csv"; + + // Location must match that of the source table. + const options = { + location: 'US', + }; + + // Export data from the table into a Google Cloud Storage file + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .extract(storage.bucket(bucketName).file(filename), options); + + console.log(`Job ${job.id} created.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_extract_table] + extractTableToGCS(); +} +main(...process.argv.slice(2)); diff --git a/samples/getDataset.js b/samples/getDataset.js new file mode 100644 index 00000000..f03f5fa7 --- /dev/null +++ b/samples/getDataset.js @@ -0,0 +1,40 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main(datasetId = 'my_dataset') { + // [START bigquery_get_dataset] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getDataset() { + // Retrieves dataset named "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + + // Retrieve dataset reference + const [dataset] = await bigquery.dataset(datasetId).get(); + + console.log('Dataset:'); + console.log(dataset.metadata.datasetReference); + } + getDataset(); + // [END bigquery_get_dataset] +} +main(...process.argv.slice(2)); diff --git a/samples/getDatasetLabels.js b/samples/getDatasetLabels.js new file mode 100644 index 00000000..e78e3d55 --- /dev/null +++ b/samples/getDatasetLabels.js @@ -0,0 +1,49 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Get Dataset Labels +// description: Gets labels on a dataset. +// usage: node getDatasetLabels.js + +function main(datasetId = 'my_dataset') { + // [START bigquery_get_dataset_labels] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getDatasetLabels() { + // Gets labels on a dataset. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + + // Retrieve current dataset metadata. + const dataset = bigquery.dataset(datasetId); + const [metadata] = await dataset.getMetadata(); + const labels = metadata.labels; + + console.log(`${datasetId} Labels:`); + for (const [key, value] of Object.entries(labels)) { + console.log(`${key}: ${value}`); + } + } + getDatasetLabels(); + // [END bigquery_get_dataset_labels] +} +main(...process.argv.slice(2)); diff --git a/samples/getJob.js b/samples/getJob.js new file mode 100644 index 00000000..8be3b946 --- /dev/null +++ b/samples/getJob.js @@ -0,0 +1,42 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(jobId = 'existing-job-id') { + // [START bigquery_get_job] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getJob() { + // Get job properties. 
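+    // job.get() fetches the full job resource, including its reference,
+    // configuration, status, and statistics.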
+ + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const jobId = "existing-job-id"; + + // Create a job reference + const job = bigquery.job(jobId); + + // Retrieve job + const [jobResult] = await job.get(); + + console.log(jobResult.metadata.jobReference); + } + // [END bigquery_get_job] + getJob(); +} +main(...process.argv.slice(2)); diff --git a/samples/getModel.js b/samples/getModel.js new file mode 100644 index 00000000..8f4ec2da --- /dev/null +++ b/samples/getModel.js @@ -0,0 +1,46 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: BigQuery Get Model +// description: Retrieves an existing model from a dataset. +// usage: node getModel.js + +function main(datasetId = 'my_dataset', modelId = 'my_existing_model') { + // [START bigquery_get_model] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getModel() { + // Retrieves model named "my_existing_model" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const modelId = "my_existing_model"; + + const dataset = bigquery.dataset(datasetId); + const [model] = await dataset.model(modelId).get(); + + console.log('Model:'); + console.log(model.metadata.modelReference); + } + // [END bigquery_get_model] + getModel(); +} +main(...process.argv.slice(2)); diff --git a/samples/getRoutine.js b/samples/getRoutine.js new file mode 100644 index 00000000..46213c70 --- /dev/null +++ b/samples/getRoutine.js @@ -0,0 +1,47 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + routineId = 'my_routine' // Existing routine +) { + // [START bigquery_get_routine] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getRoutine() { + // Gets an existing routine named "my_routine" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const datasetId = 'my_dataset'; + // const routineId = 'my_routine'; + + const dataset = bigquery.dataset(datasetId); + + // Create routine reference and make API call + const [routine] = await dataset.routine(routineId).get(); + + console.log( + `Routine ${routine.metadata.routineReference.routineId} retrieved.` + ); + } + getRoutine(); + // [END bigquery_get_routine] +} +main(...process.argv.slice(2)); diff --git a/samples/getTable.js b/samples/getTable.js new file mode 100644 index 00000000..7432185a --- /dev/null +++ b/samples/getTable.js @@ -0,0 +1,47 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: BigQuery Get Table +// description: Retrieves an existing table from a dataset. +// usage: node getTable.js + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_get_table] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getTable() { + // Retrieves table named "my_table" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Retrieve table reference + const dataset = bigquery.dataset(datasetId); + const [table] = await dataset.table(tableId).get(); + + console.log('Table:'); + console.log(table.metadata.tableReference); + } + getTable(); + // [END bigquery_get_table] +} +main(...process.argv.slice(2)); diff --git a/samples/getTableLabels.js b/samples/getTableLabels.js new file mode 100644 index 00000000..437b1304 --- /dev/null +++ b/samples/getTableLabels.js @@ -0,0 +1,50 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Get Table Labels +// description: Gets labels on a dataset. +// usage: node getTableLabels.js + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_get_table_labels] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getTableLabels() { + // Gets labels on a dataset. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Retrieve current dataset metadata. 
+ const table = bigquery.dataset(datasetId).table(tableId); + const [metadata] = await table.getMetadata(); + const labels = metadata.labels; + + console.log(`${tableId} Labels:`); + for (const [key, value] of Object.entries(labels)) { + console.log(`${key}: ${value}`); + } + } + getTableLabels(); + // [END bigquery_get_table_labels] +} +main(...process.argv.slice(2)); diff --git a/samples/getView.js b/samples/getView.js new file mode 100644 index 00000000..352a2bde --- /dev/null +++ b/samples/getView.js @@ -0,0 +1,49 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset ID + tableId = 'my_view' // Existing table ID +) { + // [START bigquery_get_view] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function getView() { + // Retrieves view properties. + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const tableId = "my_view"; + + // Retrieve view + const dataset = bigquery.dataset(datasetId); + const [view] = await dataset.table(tableId).get(); + + const fullTableId = view.metadata.id; + const viewQuery = view.metadata.view.query; + + // Display view properties + console.log(`View at ${fullTableId}`); + console.log(`View query: ${viewQuery}`); + } + getView(); + // [END bigquery_get_view] +} +main(...process.argv.slice(2)); diff --git a/samples/insertRowsAsStream.js b/samples/insertRowsAsStream.js new file mode 100644 index 00000000..389db3d8 --- /dev/null +++ b/samples/insertRowsAsStream.js @@ -0,0 +1,46 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_table_insert_rows] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function insertRowsAsStream() { + // Inserts the JSON objects into my_dataset:my_table. + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
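+     * Alternatively, pass values on the command line, e.g.: node insertRowsAsStream.js my_dataset my_table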
+     */
+    // const datasetId = 'my_dataset';
+    // const tableId = 'my_table';
+    const rows = [
+      {name: 'Tom', age: 30},
+      {name: 'Jane', age: 32},
+    ];
+
+    // Insert data into a table
+    await bigquery
+      .dataset(datasetId)
+      .table(tableId)
+      .insert(rows);
+    console.log(`Inserted ${rows.length} rows`);
+  }
+  // [END bigquery_table_insert_rows]
+  insertRowsAsStream();
+}
+main(...process.argv.slice(2));
diff --git a/samples/insertingDataTypes.js b/samples/insertingDataTypes.js
new file mode 100644
index 00000000..55e286dc
--- /dev/null
+++ b/samples/insertingDataTypes.js
@@ -0,0 +1,145 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main(datasetId = 'my_dataset', tableId = 'my_table') {
+  // [START bigquery_inserting_data_types]
+  // Import the Google Cloud client library
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const bigquery = new BigQuery();
+
+  async function insertingDataTypes() {
+    // Inserts data of various BigQuery-supported types into a table.
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample.
+     */
+    // const datasetId = 'my_dataset';
+    // const tableId = 'my_table';
+
+    // Describe the schema of the table
+    // For more information on supported data types, see
+    // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
+    const schema = [
+      {
+        name: 'name',
+        type: 'STRING',
+      },
+      {
+        name: 'age',
+        type: 'INTEGER',
+      },
+      {
+        name: 'school',
+        type: 'BYTES',
+      },
+      {
+        name: 'location',
+        type: 'GEOGRAPHY',
+      },
+      {
+        name: 'measurements',
+        mode: 'REPEATED',
+        type: 'FLOAT',
+      },
+      {
+        name: 'datesTimes',
+        type: 'RECORD',
+        fields: [
+          {
+            name: 'day',
+            type: 'DATE',
+          },
+          {
+            name: 'firstTime',
+            type: 'DATETIME',
+          },
+          {
+            name: 'secondTime',
+            type: 'TIME',
+          },
+          {
+            name: 'thirdTime',
+            type: 'TIMESTAMP',
+          },
+        ],
+      },
+    ];
+
+    // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource
+    const options = {
+      schema: schema,
+    };
+
+    // Create a new table in the dataset
+    const [table] = await bigquery
+      .dataset(datasetId)
+      .createTable(tableId, options);
+
+    console.log(`Table ${table.id} created.`);
+
+    // The DATE type represents a logical calendar date, independent of time zone.
+    // A DATE value does not represent a specific 24-hour time period.
+    // Rather, a given DATE value represents a different 24-hour period when
+    // interpreted in different time zones, and may represent a shorter or longer
+    // day during Daylight Savings Time transitions.
+    const bqDate = bigquery.date('2019-1-12');
+    // A DATETIME object represents a date and time, as they might be
+    // displayed on a calendar or clock, independent of time zone.
+    const bqDatetime = bigquery.datetime('2019-02-17 11:24:00.000');
+    // A TIME object represents a time, as might be displayed on a watch,
+    // independent of a specific date and timezone.
+    const bqTime = bigquery.time('14:00:00');
+    // A TIMESTAMP object represents an absolute point in time,
+    // independent of any time zone or convention such as Daylight
+    // Savings Time with microsecond precision.
+    const bqTimestamp = bigquery.timestamp('2020-04-27T18:07:25.356Z');
+    const bqGeography = bigquery.geography('POINT(1 2)');
+    const schoolBuffer = Buffer.from('Test University');
+
+    // Rows to be inserted into table
+    const rows = [
+      {
+        name: 'Tom',
+        age: '30',
+        location: bqGeography,
+        school: schoolBuffer,
+        measurements: [50.05, 100.5],
+        datesTimes: {
+          day: bqDate,
+          firstTime: bqDatetime,
+          secondTime: bqTime,
+          thirdTime: bqTimestamp,
+        },
+      },
+      {
+        name: 'Ada',
+        age: '35',
+        measurements: [30.08, 121.7],
+      },
+    ];
+
+    // Insert data into table
+    await bigquery
+      .dataset(datasetId)
+      .table(tableId)
+      .insert(rows);
+
+    console.log(`Inserted ${rows.length} rows`);
+  }
+  // [END bigquery_inserting_data_types]
+  insertingDataTypes();
+}
+main(...process.argv.slice(2));
diff --git a/samples/labelDataset.js b/samples/labelDataset.js
new file mode 100644
index 00000000..c170c7f2
--- /dev/null
+++ b/samples/labelDataset.js
@@ -0,0 +1,50 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+// sample-metadata:
+//   title: BigQuery Label Dataset
+//   description: Updates a label on a dataset.
+//   usage: node labelDataset.js
+
+function main(datasetId = 'my_dataset') {
+  // [START bigquery_label_dataset]
+  // Import the Google Cloud client library
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const bigquery = new BigQuery();
+
+  async function labelDataset() {
+    // Updates a label on a dataset.
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample
+     */
+    // const datasetId = "my_dataset";
+
+    // Retrieve current dataset metadata.
+    const dataset = bigquery.dataset(datasetId);
+    const [metadata] = await dataset.getMetadata();
+
+    // Add label to dataset metadata
+    metadata.labels = {color: 'green'};
+    const [apiResponse] = await dataset.setMetadata(metadata);
+
+    console.log(`${datasetId} labels:`);
+    console.log(apiResponse.labels);
+  }
+  // [END bigquery_label_dataset]
+  labelDataset();
+}
+main(...process.argv.slice(2));
diff --git a/samples/labelTable.js b/samples/labelTable.js
new file mode 100644
index 00000000..64e1cc37
--- /dev/null
+++ b/samples/labelTable.js
@@ -0,0 +1,48 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_label_table] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function labelTable() { + // Adds a label to an existing table. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + const dataset = bigquery.dataset(datasetId); + const [table] = await dataset.table(tableId).get(); + + // Retrieve current table metadata + const [metadata] = await table.getMetadata(); + + // Add label to table metadata + metadata.labels = {color: 'green'}; + const [apiResponse] = await table.setMetadata(metadata); + + console.log(`${tableId} labels:`); + console.log(apiResponse.labels); + } + // [END bigquery_label_table] + labelTable(); +} +main(...process.argv.slice(2)); diff --git a/samples/listDatasets.js b/samples/listDatasets.js index 75088081..961d6ab4 100644 --- a/samples/listDatasets.js +++ b/samples/listDatasets.js @@ -1,32 +1,34 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + 'use strict'; -async function listDatasets() { +function main() { // [START bigquery_list_datasets] - // Imports the Google Cloud client library + // Import the Google Cloud client library const {BigQuery} = require('@google-cloud/bigquery'); - - // Creates a client const bigquery = new BigQuery(); - // Lists all datasets in the specified project - const [datasets] = await bigquery.getDatasets(); - console.log('Datasets:'); - datasets.forEach(dataset => console.log(dataset.id)); + async function listDatasets() { + // Lists all datasets in current GCP project. 
+ + // Lists all datasets in the specified project + const [datasets] = await bigquery.getDatasets(); + console.log('Datasets:'); + datasets.forEach(dataset => console.log(dataset.id)); + } // [END bigquery_list_datasets] + listDatasets(); } - -listDatasets(...process.argv.slice(2)).catch(console.error); +main(...process.argv.slice(2)); diff --git a/samples/listDatasetsByLabel.js b/samples/listDatasetsByLabel.js new file mode 100644 index 00000000..df5cc171 --- /dev/null +++ b/samples/listDatasetsByLabel.js @@ -0,0 +1,38 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_list_datasets_by_label] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function listDatasetsByLabel() { + // Lists all datasets in current GCP project, filtering by label color:green. + + const options = { + filter: 'labels.color:green', + }; + // Lists all datasets in the specified project + const [datasets] = await bigquery.getDatasets(options); + + console.log('Datasets:'); + datasets.forEach(dataset => console.log(dataset.id)); + } + // [END bigquery_list_datasets_by_label] + listDatasetsByLabel(); +} +main(...process.argv.slice(2)); diff --git a/samples/listJobs.js b/samples/listJobs.js new file mode 100644 index 00000000..64350cdb --- /dev/null +++ b/samples/listJobs.js @@ -0,0 +1,37 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_list_jobs] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function listJobs() { + // Lists all jobs in current GCP project. + + // List the 10 most recent jobs in reverse chronological order. + // Omit the max_results parameter to list jobs from the past 6 months. 
+ const options = {maxResults: 10}; + const [jobs] = await bigquery.getJobs(options); + + console.log('Jobs:'); + jobs.forEach(job => console.log(job.id)); + } + // [END bigquery_list_jobs] + listJobs(); +} +main(...process.argv.slice(2)); diff --git a/samples/listModels.js b/samples/listModels.js new file mode 100644 index 00000000..12b42a8e --- /dev/null +++ b/samples/listModels.js @@ -0,0 +1,49 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: BigQuery List Models +// description: Lists all existing models in the dataset. +// usage: node listModels.js + +function main(datasetId = 'my_dataset') { + // [START bigquery_list_models] + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function listModels() { + // Lists all existing models in the dataset. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + + const dataset = bigquery.dataset(datasetId); + + dataset.getModels().then(data => { + const models = data[0]; + console.log('Models:'); + models.forEach(model => console.log(model.metadata)); + }); + } + // [END bigquery_list_models] + listModels(); +} + +main(...process.argv.slice(2)); diff --git a/samples/listModelsStreaming.js b/samples/listModelsStreaming.js new file mode 100644 index 00000000..8d9e22b6 --- /dev/null +++ b/samples/listModelsStreaming.js @@ -0,0 +1,53 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: BigQuery List Models Streaming +// description: Lists all existing models in the dataset using streaming method. +// usage: node listModelsStreaming.js + +function main(datasetId = 'my_dataset') { + // [START bigquery_list_models_streaming] + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function listModels() { + // Lists all existing models in the dataset using streaming method. + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const datasetId = "my_dataset"; + + const dataset = bigquery.dataset(datasetId); + + dataset + .getModelsStream() + .on('error', console.error) + .on('data', model => { + console.log(model.metadata); + }) + .on('end', () => { + console.log('All models have been retrieved.'); + }); + } + // [END bigquery_list_models_streaming] + listModels(); +} + +main(...process.argv.slice(2)); diff --git a/samples/listRoutines.js b/samples/listRoutines.js new file mode 100644 index 00000000..cdeb504b --- /dev/null +++ b/samples/listRoutines.js @@ -0,0 +1,42 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset' // Existing dataset +) { + // [START bigquery_list_routines] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function listRoutines() { + // Lists routines in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + + // List all routines in the dataset + const [routines] = await bigquery.dataset(datasetId).getRoutines(); + + console.log('Routines:'); + routines.forEach(routine => console.log(routine.id)); + } + listRoutines(); + // [END bigquery_list_routines] +} +main(...process.argv.slice(2)); diff --git a/samples/listTables.js b/samples/listTables.js new file mode 100644 index 00000000..a617510c --- /dev/null +++ b/samples/listTables.js @@ -0,0 +1,41 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset') { + // [START bigquery_list_tables] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function listTables() { + // Lists tables in 'my_dataset'. + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const datasetId = 'my_dataset'; + + // List all tables in the dataset + const [tables] = await bigquery.dataset(datasetId).getTables(); + + console.log('Tables:'); + tables.forEach(table => console.log(table.id)); + } + // [END bigquery_list_tables] + listTables(); +} + +main(...process.argv.slice(2)); diff --git a/samples/loadCSVFromGCS.js b/samples/loadCSVFromGCS.js new file mode 100644 index 00000000..c0c91264 --- /dev/null +++ b/samples/loadCSVFromGCS.js @@ -0,0 +1,77 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_csv] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the CSV file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.csv'; + + async function loadCSVFromGCS() { + // Imports a GCS file into a table with manually defined schema. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // Configure the load job. For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'CSV', + skipLeadingRows: 1, + schema: { + fields: [ + {name: 'name', type: 'STRING'}, + {name: 'post_abbr', type: 'STRING'}, + ], + }, + location: 'US', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_csv] + loadCSVFromGCS(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadCSVFromGCSAutodetect.js b/samples/loadCSVFromGCSAutodetect.js new file mode 100644 index 00000000..5d4120ba --- /dev/null +++ b/samples/loadCSVFromGCSAutodetect.js @@ -0,0 +1,71 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_csv_autodetect] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + /** + * This sample loads the CSV file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv + * + * TODO(developer): Replace the following lines with the path to your file + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.csv'; + + async function loadCSVFromGCSAutodetect() { + // Imports a GCS file into a table with autodetected schema. + + // Configure the load job. For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'CSV', + skipLeadingRows: 1, + autodetect: true, + location: 'US', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_csv_autodetect] + loadCSVFromGCSAutodetect(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadCSVFromGCSTruncate.js b/samples/loadCSVFromGCSTruncate.js new file mode 100644 index 00000000..20b419e9 --- /dev/null +++ b/samples/loadCSVFromGCSTruncate.js @@ -0,0 +1,83 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_csv_truncate] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the CSV file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.csv'; + + async function loadCSVFromGCSTruncate() { + /** + * Imports a GCS file into a table and overwrites + * table data if table already exists. + */ + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // Configure the load job. For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'CSV', + skipLeadingRows: 1, + schema: { + fields: [ + {name: 'name', type: 'STRING'}, + {name: 'post_abbr', type: 'STRING'}, + ], + }, + // Set the write disposition to overwrite existing table data. + writeDisposition: 'WRITE_TRUNCATE', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + console.log( + `Write disposition used: ${job.configuration.load.writeDisposition}.` + ); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_csv_truncate] + loadCSVFromGCSTruncate(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadJSONFromGCS.js b/samples/loadJSONFromGCS.js new file mode 100644 index 00000000..cf672a36 --- /dev/null +++ b/samples/loadJSONFromGCS.js @@ -0,0 +1,76 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_json] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the json file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.json + * + * TODO(developer): Replace the following lines with the path to your file. 
+ */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.json'; + + async function loadJSONFromGCS() { + // Imports a GCS file into a table with manually defined schema. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Configure the load job. For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'NEWLINE_DELIMITED_JSON', + schema: { + fields: [ + {name: 'name', type: 'STRING'}, + {name: 'post_abbr', type: 'STRING'}, + ], + }, + location: 'US', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_json] + loadJSONFromGCS(); +} + +main(...process.argv.slice(2)); diff --git a/samples/loadJSONFromGCSAutodetect.js b/samples/loadJSONFromGCSAutodetect.js new file mode 100644 index 00000000..e9163870 --- /dev/null +++ b/samples/loadJSONFromGCSAutodetect.js @@ -0,0 +1,70 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_json_autodetect] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + /** + * This sample loads the JSON file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.json + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.json'; + + async function loadJSONFromGCSAutodetect() { + // Imports a GCS file into a table with autodetected schema. + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + // Configure the load job. 
For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'NEWLINE_DELIMITED_JSON', + autodetect: true, + location: 'US', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + loadJSONFromGCSAutodetect(); + // [END bigquery_load_table_gcs_json_autodetect] +} +main(...process.argv.slice(2)); diff --git a/samples/loadJSONFromGCSTruncate.js b/samples/loadJSONFromGCSTruncate.js new file mode 100644 index 00000000..4d97aab4 --- /dev/null +++ b/samples/loadJSONFromGCSTruncate.js @@ -0,0 +1,82 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_json_truncate] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the JSON file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.json + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.json'; + + async function loadJSONFromGCSTruncate() { + /** + * Imports a GCS file into a table and overwrites + * table data if table already exists. + */ + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Configure the load job. For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'NEWLINE_DELIMITED_JSON', + schema: { + fields: [ + {name: 'name', type: 'STRING'}, + {name: 'post_abbr', type: 'STRING'}, + ], + }, + // Set the write disposition to overwrite existing table data. 
+ writeDisposition: 'WRITE_TRUNCATE', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + console.log( + `Write disposition used: ${job.configuration.load.writeDisposition}.` + ); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_json_truncate] + loadJSONFromGCSTruncate(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadLocalFile.js b/samples/loadLocalFile.js new file mode 100644 index 00000000..2b1e234f --- /dev/null +++ b/samples/loadLocalFile.js @@ -0,0 +1,55 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', + tableId = 'my_table', + filename = '/path/to/file.csv' +) { + // [START bigquery_load_from_file] + // Imports the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function loadLocalFile() { + // Imports a local file into a table. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const filename = '/path/to/file.csv'; + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // Load data from a local file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(filename); + + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_from_file] + loadLocalFile(); +} + +main(...process.argv.slice(2)); diff --git a/samples/loadOrcFromGCSTruncate.js b/samples/loadOrcFromGCSTruncate.js new file mode 100644 index 00000000..2bdc2827 --- /dev/null +++ b/samples/loadOrcFromGCSTruncate.js @@ -0,0 +1,76 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+'use strict';
+
+function main(datasetId = 'my_dataset', tableId = 'my_table') {
+  // [START bigquery_load_table_gcs_orc_truncate]
+  // Import the Google Cloud client libraries
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const {Storage} = require('@google-cloud/storage');
+
+  // Instantiate the clients
+  const bigquery = new BigQuery();
+  const storage = new Storage();
+
+  /**
+   * This sample loads the ORC file at
+   * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc
+   *
+   * TODO(developer): Replace the following lines with the path to your file.
+   */
+  const bucketName = 'cloud-samples-data';
+  const filename = 'bigquery/us-states/us-states.orc';
+
+  async function loadORCFromGCSTruncate() {
+    /**
+     * Imports a GCS file into a table and overwrites
+     * table data if table already exists.
+     */
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample.
+     */
+    // const datasetId = "my_dataset";
+    // const tableId = "my_table";
+
+    // Configure the load job. For full list of options, see:
+    // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad
+    const metadata = {
+      sourceFormat: 'ORC',
+      // Set the write disposition to overwrite existing table data.
+      writeDisposition: 'WRITE_TRUNCATE',
+    };
+
+    // Load data from a Google Cloud Storage file into the table
+    const [job] = await bigquery
+      .dataset(datasetId)
+      .table(tableId)
+      .load(storage.bucket(bucketName).file(filename), metadata);
+    // load() waits for the job to finish
+    console.log(`Job ${job.id} completed.`);
+    console.log(
+      `Write disposition used: ${job.configuration.load.writeDisposition}.`
+    );
+
+    // Check the job's status for errors
+    const errors = job.status.errors;
+    if (errors && errors.length > 0) {
+      throw errors;
+    }
+  }
+  // [END bigquery_load_table_gcs_orc_truncate]
+  loadORCFromGCSTruncate();
+}
+main(...process.argv.slice(2));
diff --git a/samples/loadParquetFromGCSTruncate.js b/samples/loadParquetFromGCSTruncate.js
new file mode 100644
index 00000000..d6eb39a3
--- /dev/null
+++ b/samples/loadParquetFromGCSTruncate.js
@@ -0,0 +1,77 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main(datasetId = 'my_dataset', tableId = 'my_table') {
+  // [START bigquery_load_table_gcs_parquet_truncate]
+  // Import the Google Cloud client libraries
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const {Storage} = require('@google-cloud/storage');
+
+  // Instantiate clients
+  const bigquery = new BigQuery();
+  const storage = new Storage();
+
+  /**
+   * This sample loads the Parquet file at
+   * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.parquet
+   *
+   * TODO(developer): Replace the following lines with the path to your file.
+ */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.parquet'; + + async function loadParquetFromGCSTruncate() { + /** + * Imports a GCS file into a table and overwrites + * table data if table already exists. + */ + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Configure the load job. For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'PARQUET', + // Set the write disposition to overwrite existing table data. + writeDisposition: 'WRITE_TRUNCATE', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + console.log( + `Write disposition used: ${job.configuration.load.writeDisposition}.` + ); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_parquet_truncate] + loadParquetFromGCSTruncate(); +} + +main(...process.argv.slice(2)); diff --git a/samples/loadTableGCSAvro.js b/samples/loadTableGCSAvro.js new file mode 100644 index 00000000..4806e342 --- /dev/null +++ b/samples/loadTableGCSAvro.js @@ -0,0 +1,72 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset ID + tableId = 'us_states' // Existing table ID +) { + // [START bigquery_load_table_gcs_avro] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the Avro file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.avro + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.avro'; + + async function loadTableGCSAvro() { + // Imports a GCS file into a table with Avro source format. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'us_states'; + + // Configure the load job. 
For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const jobConfigurationLoad = { + sourceFormat: 'AVRO', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), jobConfigurationLoad); + + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_avro] + loadTableGCSAvro(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadTableGCSAvroTruncate.js b/samples/loadTableGCSAvroTruncate.js new file mode 100644 index 00000000..d089ea48 --- /dev/null +++ b/samples/loadTableGCSAvroTruncate.js @@ -0,0 +1,79 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset ID + tableId = 'us_states' // Existing table ID +) { + // [START bigquery_load_table_gcs_avro_truncate] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the Avro file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.avro + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.avro'; + + async function loadTableGCSAvroTruncate() { + /** + * Imports a GCS file into a table and overwrites + * table data if table already exists. + */ + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'us_states'; + + // Configure the load job. 
For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const jobConfigurationLoad = { + sourceFormat: 'AVRO', + writeDisposition: 'WRITE_TRUNCATE', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), jobConfigurationLoad); + + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + console.log( + `Write disposition used: ${job.configuration.load.writeDisposition}.` + ); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_avro_truncate] + loadTableGCSAvroTruncate(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadTableGCSORC.js b/samples/loadTableGCSORC.js new file mode 100644 index 00000000..1dcfddb2 --- /dev/null +++ b/samples/loadTableGCSORC.js @@ -0,0 +1,70 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_orc] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the ORC file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.orc'; + + async function loadTableGCSORC() { + // Imports a GCS file into a table with ORC source format. + + /** + * TODO(developer): Uncomment the following line before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table' + + // Configure the load job. 
For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'ORC', + location: 'US', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_orc] + loadTableGCSORC(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadTableGCSParquet.js b/samples/loadTableGCSParquet.js new file mode 100644 index 00000000..cc3e3110 --- /dev/null +++ b/samples/loadTableGCSParquet.js @@ -0,0 +1,70 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_load_table_gcs_parquet] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the Parquet file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.parquet + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states.parquet'; + + async function loadTableGCSParquet() { + // Imports a GCS file into a table with Parquet source format. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // Configure the load job. 
For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const metadata = { + sourceFormat: 'PARQUET', + location: 'US', + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_gcs_parquet] + loadTableGCSParquet(); +} +main(...process.argv.slice(2)); diff --git a/samples/loadTablePartitioned.js b/samples/loadTablePartitioned.js new file mode 100644 index 00000000..5e48cab9 --- /dev/null +++ b/samples/loadTablePartitioned.js @@ -0,0 +1,85 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_new_table') { + // [START bigquery_load_table_partitioned] + // Import the Google Cloud client libraries + const {BigQuery} = require('@google-cloud/bigquery'); + const {Storage} = require('@google-cloud/storage'); + + // Instantiate clients + const bigquery = new BigQuery(); + const storage = new Storage(); + + /** + * This sample loads the CSV file at + * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv + * + * TODO(developer): Replace the following lines with the path to your file. + */ + const bucketName = 'cloud-samples-data'; + const filename = 'bigquery/us-states/us-states-by-date.csv'; + + async function loadTablePartitioned() { + // Load data into a table that uses column-based time partitioning. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_new_table'; + + // Configure the load job. 
For full list of options, see: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad + const partitionConfig = { + type: 'DAY', + expirationMs: '7776000000', // 90 days + field: 'date', + }; + + const metadata = { + sourceFormat: 'CSV', + skipLeadingRows: 1, + schema: { + fields: [ + {name: 'name', type: 'STRING'}, + {name: 'post_abbr', type: 'STRING'}, + {name: 'date', type: 'DATE'}, + ], + }, + location: 'US', + timePartitioning: partitionConfig, + }; + + // Load data from a Google Cloud Storage file into the table + const [job] = await bigquery + .dataset(datasetId) + .table(tableId) + .load(storage.bucket(bucketName).file(filename), metadata); + + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); + + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } + } + // [END bigquery_load_table_partitioned] + loadTablePartitioned(); +} +main(...process.argv.slice(2)); diff --git a/samples/nestedRepeatedSchema.js b/samples/nestedRepeatedSchema.js new file mode 100644 index 00000000..2bf6ebf4 --- /dev/null +++ b/samples/nestedRepeatedSchema.js @@ -0,0 +1,80 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + tableId = 'my_new_table', // Table to be created + schema = [ + {name: 'Name', type: 'STRING', mode: 'REQUIRED'}, + { + name: 'Addresses', + type: 'RECORD', + mode: 'REPEATED', + fields: [ + {name: 'Address', type: 'STRING'}, + {name: 'City', type: 'STRING'}, + {name: 'State', type: 'STRING'}, + {name: 'Zip', type: 'STRING'}, + ], + }, + ] +) { + // [START bigquery_nested_repeated_schema] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function nestedRepeatedSchema() { + // Creates a new table named "my_table" in "my_dataset" + // with nested and repeated columns in schema. + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + // const schema = [ + // {name: 'Name', type: 'STRING', mode: 'REQUIRED'}, + // { + // name: 'Addresses', + // type: 'RECORD', + // mode: 'REPEATED', + // fields: [ + // {name: 'Address', type: 'STRING'}, + // {name: 'City', type: 'STRING'}, + // {name: 'State', type: 'STRING'}, + // {name: 'Zip', type: 'STRING'}, + // ], + // }, + // ]; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + schema: schema, + location: 'US', + }; + + // Create a new table in the dataset + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId, options); + + console.log(`Table ${table.id} created.`); + } + // [END bigquery_nested_repeated_schema] + nestedRepeatedSchema(); +} +main(...process.argv.slice(2)); diff --git a/samples/package.json b/samples/package.json index 424c924c..8b4527b9 100644 --- a/samples/package.json +++ b/samples/package.json @@ -2,7 +2,8 @@ "name": "nodejs-docs-samples-bigquery", "files": [ "*.js", - "resources/" + "resources/", + "auth-user-sample/oauth2.keys.json" ], "private": true, "license": "Apache-2.0", @@ -12,17 +13,20 @@ "node": ">=8" }, "scripts": { - "test": "mocha --timeout 60000" + "test": "mocha --timeout 200000" }, "dependencies": { - "@google-cloud/bigquery": "^2.1.0", - "@google-cloud/storage": "^2.0.0", - "yargs": "^13.0.0" + "@google-cloud/bigquery": "^5.7.0", + "@google-cloud/storage": "^5.0.0", + "google-auth-library": "^7.0.0", + "readline-promise": "^1.0.4", + "yargs": "^17.0.0" }, "devDependencies": { "chai": "^4.2.0", - "execa": "^1.0.0", - "mocha": "^6.0.0", - "uuid": "^3.3.0" + "mocha": "^8.0.0", + "proxyquire": "^2.1.3", + "sinon": "^11.0.0", + "uuid": "^8.0.0" } } diff --git a/samples/queries.js b/samples/queries.js deleted file mode 100644 index ee38e342..00000000 --- a/samples/queries.js +++ /dev/null @@ -1,150 +0,0 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -async function queryStackOverflow() { - // [START bigquery_simple_app_all] - // [START bigquery_simple_app_deps] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - // [END bigquery_simple_app_deps] - - // [START bigquery_simple_app_client] - // Creates a client - const bigquery = new BigQuery(); - // [END bigquery_simple_app_client] - - // [START bigquery_simple_app_query] - // The SQL query to run - const sqlQuery = `SELECT - CONCAT( - 'https://stackoverflow.com/questions/', - CAST(id as STRING)) as url, - view_count - FROM \`bigquery-public-data.stackoverflow.posts_questions\` - WHERE tags like '%google-bigquery%' - ORDER BY view_count DESC - LIMIT 10`; - - const options = { - query: sqlQuery, - // Location must match that of the dataset(s) referenced in the query. 
- location: 'US', - }; - - // Runs the query - const [rows] = await bigquery.query(options); - // [END bigquery_simple_app_query] - - // [START bigquery_simple_app_print] - console.log('Query Results:'); - rows.forEach(row => { - const url = row['url']; - const viewCount = row['view_count']; - console.log(`url: ${url}, ${viewCount} views`); - }); - // [END bigquery_simple_app_print] - // [END bigquery_simple_app_all] -} - -async function query() { - // [START bigquery_query] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - // Creates a client - const bigquery = new BigQuery(); - - const query = `SELECT name - FROM \`bigquery-public-data.usa_names.usa_1910_2013\` - WHERE state = 'TX' - LIMIT 100`; - const options = { - query: query, - // Location must match that of the dataset(s) referenced in the query. - location: 'US', - }; - - // Runs the query as a job - const [job] = await bigquery.createQueryJob(options); - console.log(`Job ${job.id} started.`); - - // Waits for the query to finish - const [rows] = await job.getQueryResults(); - - // Prints the results - console.log('Rows:'); - rows.forEach(row => console.log(row)); - // [END bigquery_query] -} - -async function queryDisableCache() { - // [START bigquery_query_no_cache] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - // Creates a client - const bigquery = new BigQuery(); - - const query = `SELECT corpus - FROM \`bigquery-public-data.samples.shakespeare\` - GROUP BY corpus`; - const options = { - query: query, - // Location must match that of the dataset(s) referenced in the query. - location: 'US', - useQueryCache: false, - }; - - // Runs the query as a job - const [job] = await bigquery.createQueryJob(options); - console.log(`Job ${job.id} started.`); - - // Waits for the query to finish - const [rows] = await job.getQueryResults(); - - // Prints the results - console.log('Rows:'); - rows.forEach(row => console.log(row)); - // [END bigquery_query_no_cache] -} - -require(`yargs`) - .demand(1) - .command( - `stackoverflow`, - `Queries a public Stack Overflow dataset.`, - {}, - queryStackOverflow - ) - .command(`query`, `Queries the US Names dataset.`, {}, query) - .command( - `disable-cache`, - `Queries the Shakespeare dataset with the cache disabled.`, - {}, - queryDisableCache - ) - .example(`node $0 stackoverflow`, `Queries a public Stackoverflow dataset.`) - .example(`node $0 query`, `Queries the US Names dataset.`) - .example( - `node $0 disable-cache`, - `Queries the Shakespeare dataset with the cache disabled.` - ) - .wrap(120) - .recommendCommands() - .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`) - .help() - .strict().argv; diff --git a/samples/query.js b/samples/query.js new file mode 100644 index 00000000..909be903 --- /dev/null +++ b/samples/query.js @@ -0,0 +1,53 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main() { + // [START bigquery_query] + // [START bigquery_client_default_credentials] + // Import the Google Cloud client library using default credentials + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + // [END bigquery_client_default_credentials] + async function query() { + // Queries the U.S. given names dataset for the state of Texas. + + const query = `SELECT name + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE state = 'TX' + LIMIT 100`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + const options = { + query: query, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + console.log(`Job ${job.id} started.`); + + // Wait for the query to finish + const [rows] = await job.getQueryResults(); + + // Print the results + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query] + query(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryBatch.js b/samples/queryBatch.js new file mode 100644 index 00000000..695199fe --- /dev/null +++ b/samples/queryBatch.js @@ -0,0 +1,55 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_batch] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryBatch() { + // Runs a query at batch priority. + + // Create query job configuration. For all options, see + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery + const queryJobConfig = { + query: `SELECT corpus + FROM \`bigquery-public-data.samples.shakespeare\` + LIMIT 10`, + useLegacySql: false, + priority: 'BATCH', + }; + + // Create job configuration. For all options, see + // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfiguration + const jobConfig = { + // Specify a job configuration to set optional job resource properties. + configuration: { + query: queryJobConfig, + }, + }; + + // Make API request. + const [job] = await bigquery.createJob(jobConfig); + + const jobId = job.metadata.id; + const state = job.metadata.status.state; + console.log(`Job ${jobId} is currently in state ${state}`); + } + // [END bigquery_query_batch] + queryBatch(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryDestinationTable.js b/samples/queryDestinationTable.js new file mode 100644 index 00000000..3925a54c --- /dev/null +++ b/samples/queryDestinationTable.js @@ -0,0 +1,59 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_query_destination_table] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryDestinationTable() { + // Queries the U.S. given names dataset for the state of Texas + // and saves results to permanent table. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + // Create destination table reference + const dataset = bigquery.dataset(datasetId); + const destinationTable = dataset.table(tableId); + + const query = `SELECT name + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE state = 'TX' + LIMIT 100`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + query: query, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + destination: destinationTable, + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + + console.log(`Job ${job.id} started.`); + console.log(`Query results loaded to table ${destinationTable.id}`); + } + // [END bigquery_query_destination_table] + queryDestinationTable(datasetId, tableId); +} +main(...process.argv.slice(2)); diff --git a/samples/queryDisableCache.js b/samples/queryDisableCache.js new file mode 100644 index 00000000..4b777b20 --- /dev/null +++ b/samples/queryDisableCache.js @@ -0,0 +1,52 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_no_cache] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + + async function queryDisableCache() { + // Queries the Shakespeare dataset with the cache disabled. + + // Create a client + const bigquery = new BigQuery(); + + const query = `SELECT corpus + FROM \`bigquery-public-data.samples.shakespeare\` + GROUP BY corpus`; + const options = { + query: query, + // Location must match that of the dataset(s) referenced in the query. 
+ location: 'US', + useQueryCache: false, + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + console.log(`Job ${job.id} started.`); + + // Wait for the query to finish + const [rows] = await job.getQueryResults(); + + // Print the results + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query_no_cache] + queryDisableCache(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryDryRun.js b/samples/queryDryRun.js new file mode 100644 index 00000000..0e916786 --- /dev/null +++ b/samples/queryDryRun.js @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_dry_run] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryDryRun() { + // Runs a dry query of the U.S. given names dataset for the state of Texas. + + const query = `SELECT name + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE state = 'TX' + LIMIT 100`; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + const options = { + query: query, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + dryRun: true, + }; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(options); + + // Print the status and statistics + console.log('Status:'); + console.log(job.metadata.status); + console.log('\nJob Statistics:'); + console.log(job.metadata.statistics); + } + // [END bigquery_query_dry_run] + queryDryRun(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryExternalGCSPerm.js b/samples/queryExternalGCSPerm.js new file mode 100644 index 00000000..13d11d56 --- /dev/null +++ b/samples/queryExternalGCSPerm.js @@ -0,0 +1,79 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + tableId = 'my_new_table', // Table to be created + schema = [ + {name: 'name', type: 'STRING'}, + {name: 'post_abbr', type: 'STRING'}, + ] +) { + // [START bigquery_query_external_gcs_perm] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryExternalGCSPerm() { + // Queries an external data source using a permanent table + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table"; + + // Configure the external data source + const dataConfig = { + sourceFormat: 'CSV', + sourceUris: ['gs://cloud-samples-data/bigquery/us-states/us-states.csv'], + // Optionally skip header row + csvOptions: {skipLeadingRows: 1}, + }; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + const options = { + schema: schema, + externalDataConfiguration: dataConfig, + }; + + // Create an external table linked to the GCS file + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId, options); + + console.log(`Table ${table.id} created.`); + + // Example query to find states starting with 'W' + const query = `SELECT post_abbr + FROM \`${datasetId}.${tableId}\` + WHERE name LIKE 'W%'`; + + // Run the query as a job + const [job] = await bigquery.createQueryJob(query); + console.log(`Job ${job.id} started.`); + + // Wait for the query to finish + const [rows] = await job.getQueryResults(); + + // Print the results + console.log('Rows:'); + console.log(rows); + } + // [END bigquery_query_external_gcs_perm] + queryExternalGCSPerm(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryLegacy.js b/samples/queryLegacy.js new file mode 100644 index 00000000..701c4a68 --- /dev/null +++ b/samples/queryLegacy.js @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_legacy] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryLegacy() { + // Queries the U.S. given names dataset for the state of Texas using legacy SQL. + + const query = + 'SELECT word FROM [bigquery-public-data:samples.shakespeare] LIMIT 10;'; + + // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + const options = { + query: query, + // Location must match that of the dataset(s) referenced in the query. 
+      location: 'US',
+      useLegacySql: true,
+    };
+
+    // Run the query as a job
+    const [job] = await bigquery.createQueryJob(options);
+    console.log(`Job ${job.id} started.`);
+
+    // Wait for the query to finish
+    const [rows] = await job.getQueryResults();
+
+    // Print the results
+    console.log('Rows:');
+    rows.forEach(row => console.log(row));
+  }
+  // [END bigquery_query_legacy]
+  queryLegacy();
+}
+main(...process.argv.slice(2));
diff --git a/samples/queryLegacyLargeResults.js b/samples/queryLegacyLargeResults.js
new file mode 100644
index 00000000..45689390
--- /dev/null
+++ b/samples/queryLegacyLargeResults.js
@@ -0,0 +1,67 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main(
+  datasetId = 'my_dataset',
+  tableId = 'my_table',
+  projectId = 'my_project'
+) {
+  // [START bigquery_query_legacy_large_results]
+  // Import the Google Cloud client library
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const bigquery = new BigQuery();
+
+  async function queryLegacyLargeResults() {
+    // Runs a legacy SQL query with large result sets enabled.
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample.
+     */
+    // const projectId = "my_project";
+    // const datasetId = "my_dataset";
+    // const tableId = "my_table";
+
+    const query =
+      'SELECT word FROM [bigquery-public-data:samples.shakespeare] LIMIT 10;';
+
+    // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
+    const options = {
+      query: query,
+      // Location must match that of the dataset(s) referenced
+      // in the query and of the destination table.
+      useLegacySql: true,
+      allowLargeResults: true,
+      destinationTable: {
+        projectId: projectId,
+        datasetId: datasetId,
+        tableId: tableId,
+      },
+    };
+
+    const [job] = await bigquery.createQueryJob(options);
+    console.log(`Job ${job.id} started.`);
+
+    // Wait for the query to finish
+    const [rows] = await job.getQueryResults();
+
+    // Print the results
+    console.log('Rows:');
+    rows.forEach(row => console.log(row));
+  }
+  // [END bigquery_query_legacy_large_results]
+  queryLegacyLargeResults();
+}
+main(...process.argv.slice(2));
diff --git a/samples/queryPagination.js b/samples/queryPagination.js
new file mode 100644
index 00000000..88e0151f
--- /dev/null
+++ b/samples/queryPagination.js
@@ -0,0 +1,46 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +'use strict'; + +function main() { + // [START bigquery_query_pagination] + // Import the Google Cloud client library using default credentials + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryPagination() { + // Run a query and get rows using automatic pagination. + + const query = `SELECT name, SUM(number) as total_people + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + GROUP BY name + ORDER BY total_people DESC + LIMIT 100`; + + // Run the query as a job. + const [job] = await bigquery.createQueryJob(query); + + // Wait for job to complete and get rows. + const [rows] = await job.getQueryResults(); + + console.log('Query results:'); + rows.forEach(row => { + console.log(`name: ${row.name}, ${row.total_people} total people`); + }); + } + queryPagination(); + // [END bigquery_query_pagination] +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsArrays.js b/samples/queryParamsArrays.js new file mode 100644 index 00000000..9ebadc3d --- /dev/null +++ b/samples/queryParamsArrays.js @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_params_arrays] + // Run a query using array query parameters + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsArrays() { + // The SQL query to run + const sqlQuery = `SELECT name, sum(number) as count + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE gender = @gender + AND state IN UNNEST(@states) + GROUP BY name + ORDER BY count DESC + LIMIT 10;`; + + const options = { + query: sqlQuery, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + params: {gender: 'M', states: ['WA', 'WI', 'WV', 'WY']}, + }; + + // Run the query + const [rows] = await bigquery.query(options); + + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query_params_arrays] + queryParamsArrays(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsNamed.js b/samples/queryParamsNamed.js new file mode 100644 index 00000000..1903ab0e --- /dev/null +++ b/samples/queryParamsNamed.js @@ -0,0 +1,49 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main() { + // [START bigquery_query_params_named] + // Run a query using named query parameters + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsNamed() { + // The SQL query to run + const sqlQuery = `SELECT word, word_count + FROM \`bigquery-public-data.samples.shakespeare\` + WHERE corpus = @corpus + AND word_count >= @min_word_count + ORDER BY word_count DESC`; + + const options = { + query: sqlQuery, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + params: {corpus: 'romeoandjuliet', min_word_count: 250}, + }; + + // Run the query + const [rows] = await bigquery.query(options); + + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query_params_named] + queryParamsNamed(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsNamedTypes.js b/samples/queryParamsNamedTypes.js new file mode 100644 index 00000000..f04359db --- /dev/null +++ b/samples/queryParamsNamedTypes.js @@ -0,0 +1,53 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_params_named_types] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsNamedTypes() { + // Run a query using named query parameters and provided parameter types. + + // The SQL query to run + const sqlQuery = `SELECT word, word_count + FROM \`bigquery-public-data.samples.shakespeare\` + WHERE word IN UNNEST(@wordList) + AND corpus = @corpus + AND word_count >= @minWordCount + ORDER BY word_count DESC`; + + const queryOptions = { + query: sqlQuery, + params: { + wordList: ['and', 'is', 'the', 'moon'], + corpus: 'romeoandjuliet', + minWordCount: 250, + }, + types: {wordList: ['STRING'], corpus: 'STRING', minWordCount: 'INT64'}, + }; + + // Run the query + const [rows] = await bigquery.query(queryOptions); + + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query_params_named_types] + queryParamsNamedTypes(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsPositional.js b/samples/queryParamsPositional.js new file mode 100644 index 00000000..0bc1e8ff --- /dev/null +++ b/samples/queryParamsPositional.js @@ -0,0 +1,49 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_params_positional] + // Run a query using positional query parameters + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsPositional() { + // The SQL query to run + const sqlQuery = `SELECT word, word_count + FROM \`bigquery-public-data.samples.shakespeare\` + WHERE corpus = ? + AND word_count >= ? + ORDER BY word_count DESC`; + + const options = { + query: sqlQuery, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + params: ['romeoandjuliet', 250], + }; + + // Run the query + const [rows] = await bigquery.query(options); + + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query_params_positional] + queryParamsPositional(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsPositionalTypes.js b/samples/queryParamsPositionalTypes.js new file mode 100644 index 00000000..80712fec --- /dev/null +++ b/samples/queryParamsPositionalTypes.js @@ -0,0 +1,49 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_params_positional_types] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsPositionalTypes() { + // Run a query using positional query parameters and provided parameter types. + + // The SQL query to run + const sqlQuery = `SELECT word, word_count + FROM \`bigquery-public-data.samples.shakespeare\` + WHERE word IN UNNEST(?) + AND corpus = ? + AND word_count >= ? + ORDER BY word_count DESC`; + + const queryOptions = { + query: sqlQuery, + params: [['and', 'is', 'the', 'moon'], 'romeoandjuliet', 250], + types: [['STRING'], 'STRING', 'INT64'], + }; + + // Run the query + const [rows] = await bigquery.query(queryOptions); + + console.log('Rows:'); + rows.forEach(row => console.log(row)); + } + // [END bigquery_query_params_positional_types] + queryParamsPositionalTypes(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsStructs.js b/samples/queryParamsStructs.js new file mode 100644 index 00000000..7a505f9d --- /dev/null +++ b/samples/queryParamsStructs.js @@ -0,0 +1,45 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_params_structs] + // Run a query using struct query parameters + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsStructs() { + // The SQL query to run + const sqlQuery = 'SELECT @struct_value AS struct_obj;'; + + const options = { + query: sqlQuery, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + params: {struct_value: {x: 1, y: 'foo'}}, + }; + + // Run the query + const [rows] = await bigquery.query(options); + + console.log('Rows:'); + rows.forEach(row => console.log(row.struct_obj.y)); + } + // [END bigquery_query_params_structs] + queryParamsStructs(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryParamsTimestamps.js b/samples/queryParamsTimestamps.js new file mode 100644 index 00000000..53221ccc --- /dev/null +++ b/samples/queryParamsTimestamps.js @@ -0,0 +1,45 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main() { + // [START bigquery_query_params_timestamps] + // Run a query using timestamp parameters + + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function queryParamsTimestamps() { + // The SQL query to run + const sqlQuery = 'SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);'; + + const options = { + query: sqlQuery, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + params: {ts_value: new Date()}, + }; + + // Run the query + const [rows] = await bigquery.query(options); + + console.log('Rows:'); + rows.forEach(row => console.log(row.f0_)); + } + // [END bigquery_query_params_timestamps] + queryParamsTimestamps(); +} +main(...process.argv.slice(2)); diff --git a/samples/queryStackOverflow.js b/samples/queryStackOverflow.js new file mode 100644 index 00000000..63355cfc --- /dev/null +++ b/samples/queryStackOverflow.js @@ -0,0 +1,67 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main() { + // [START bigquery_simple_app_all] + // [START bigquery_simple_app_deps] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + // [END bigquery_simple_app_deps] + + async function queryStackOverflow() { + // Queries a public Stack Overflow dataset. + + // [START bigquery_simple_app_client] + // Create a client + const bigqueryClient = new BigQuery(); + // [END bigquery_simple_app_client] + + // [START bigquery_simple_app_query] + // The SQL query to run + const sqlQuery = `SELECT + CONCAT( + 'https://stackoverflow.com/questions/', + CAST(id as STRING)) as url, + view_count + FROM \`bigquery-public-data.stackoverflow.posts_questions\` + WHERE tags like '%google-bigquery%' + ORDER BY view_count DESC + LIMIT 10`; + + const options = { + query: sqlQuery, + // Location must match that of the dataset(s) referenced in the query. + location: 'US', + }; + + // Run the query + const [rows] = await bigqueryClient.query(options); + // [END bigquery_simple_app_query] + + // [START bigquery_simple_app_print] + console.log('Query Results:'); + rows.forEach(row => { + const url = row['url']; + const viewCount = row['view_count']; + console.log(`url: ${url}, ${viewCount} views`); + }); + // [END bigquery_simple_app_print] + } + queryStackOverflow(); + // [END bigquery_simple_app_all] +} + +main(...process.argv.slice(2)); diff --git a/samples/quickstart.js b/samples/quickstart.js index 736c62fc..d739a3dd 100644 --- a/samples/quickstart.js +++ b/samples/quickstart.js @@ -1,35 +1,35 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
'use strict'; -async function createDataset( - datasetName = 'my_new_dataset' // Name for the new dataset -) { +function main(datasetName = 'my_new_dataset') { // [START bigquery_quickstart] // Imports the Google Cloud client library const {BigQuery} = require('@google-cloud/bigquery'); - // Creates a client - const bigquery = new BigQuery(); + async function createDataset() { + // Creates a client + const bigqueryClient = new BigQuery(); - // Create the dataset - const [dataset] = await bigquery.createDataset(datasetName); - console.log(`Dataset ${dataset.id} created.`); + // Create the dataset + const [dataset] = await bigqueryClient.createDataset(datasetName); + console.log(`Dataset ${dataset.id} created.`); + } + createDataset(); // [END bigquery_quickstart] } const args = process.argv.slice(2); -createDataset(...args).catch(console.error); +main(...args); diff --git a/samples/relaxColumn.js b/samples/relaxColumn.js new file mode 100644 index 00000000..4e6e0ca7 --- /dev/null +++ b/samples/relaxColumn.js @@ -0,0 +1,62 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + tableId = 'my_new_table' // Table to be created +) { + // [START bigquery_relax_column] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function relaxColumn() { + /** + * Changes columns from required to nullable. + * Assumes existing table with the following schema: + * [{name: 'Name', type: 'STRING', mode: 'REQUIRED'}, + * {name: 'Age', type: 'INTEGER'}, + * {name: 'Weight', type: 'FLOAT'}, + * {name: 'IsMagic', type: 'BOOLEAN'}]; + */ + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const tableId = 'my_table'; + + const newSchema = [ + {name: 'Name', type: 'STRING', mode: 'NULLABLE'}, + {name: 'Age', type: 'INTEGER'}, + {name: 'Weight', type: 'FLOAT'}, + {name: 'IsMagic', type: 'BOOLEAN'}, + ]; + + // Retrieve current table metadata + const table = bigquery.dataset(datasetId).table(tableId); + const [metadata] = await table.getMetadata(); + + // Update schema + metadata.schema = newSchema; + const [apiResponse] = await table.setMetadata(metadata); + + console.log(apiResponse.schema.fields); + } + // [END bigquery_relax_column] + relaxColumn(); +} +main(...process.argv.slice(2)); diff --git a/samples/relaxColumnLoadAppend.js b/samples/relaxColumnLoadAppend.js new file mode 100644 index 00000000..e278e0df --- /dev/null +++ b/samples/relaxColumnLoadAppend.js @@ -0,0 +1,75 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main(
+  datasetId = 'my_dataset',
+  tableId = 'my_table',
+  fileName = '/path/to/file.csv'
+) {
+  // [START bigquery_relax_column_load_append]
+  // Import the Google Cloud client libraries
+  const {BigQuery} = require('@google-cloud/bigquery');
+
+  // Instantiate client
+  const bigquery = new BigQuery();
+
+  async function relaxColumnLoadAppend() {
+    // Changes a required column to nullable in a load append job.
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample.
+     */
+    // const fileName = '/path/to/file.csv';
+    // const datasetId = 'my_dataset';
+    // const tableId = 'my_table';
+
+    // In this example, the existing table contains the 'Name'
+    // column as a 'REQUIRED' field.
+    const schema = 'Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';
+
+    // Retrieve destination table reference
+    const [table] = await bigquery
+      .dataset(datasetId)
+      .table(tableId)
+      .get();
+    const destinationTableRef = table.metadata.tableReference;
+
+    // Set load job options
+    const options = {
+      schema: schema,
+      schemaUpdateOptions: ['ALLOW_FIELD_RELAXATION'],
+      writeDisposition: 'WRITE_APPEND',
+      destinationTable: destinationTableRef,
+    };
+
+    // Load data from a local file into the table
+    const [job] = await bigquery
+      .dataset(datasetId)
+      .table(tableId)
+      .load(fileName, options);
+
+    console.log(`Job ${job.id} completed.`);
+
+    // Check the job's status for errors
+    const errors = job.status.errors;
+    if (errors && errors.length > 0) {
+      throw errors;
+    }
+  }
+  // [END bigquery_relax_column_load_append]
+  relaxColumnLoadAppend();
+}
+main(...process.argv.slice(2));
diff --git a/samples/relaxColumnQueryAppend.js b/samples/relaxColumnQueryAppend.js
new file mode 100644
index 00000000..40aa268e
--- /dev/null
+++ b/samples/relaxColumnQueryAppend.js
@@ -0,0 +1,97 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main(
+  projectId = 'my_project', // GCP Project ID
+  datasetId = 'my_dataset', // Existing dataset
+  tableId = 'my_table' // Existing table
+) {
+  // [START bigquery_relax_column_query_append]
+  const {BigQuery} = require('@google-cloud/bigquery');
+  const bigquery = new BigQuery();
+
+  async function relaxColumnQueryAppend() {
+    // Changes a required column to nullable in a query append job.
+
+    /**
+     * TODO(developer): Uncomment the following lines before running the sample.
+     */
+    // const projectId = "my_project";
+    // const datasetId = "my_dataset";
+    // const tableId = "my_table";
+
+    // Retrieve the destination table and check the number of required fields.
+    const dataset = bigquery.dataset(datasetId);
+    const table = dataset.table(tableId);
+    const [metaData] = await table.getMetadata();
+
+    const requiredFields = metaData.schema.fields.filter(
+      ({mode}) => mode === 'REQUIRED'
+    ).length;
+
+    console.log(`${requiredFields} fields in the schema are required.`);
+
+    // Create destination table reference
+    const tableRef = {
+      projectId,
+      tableId,
+      datasetId,
+    };
+
+    /* Configure the query to append the results to a destination table,
+     * allowing field relaxation. In this example, the existing table
+     * contains 'age' as a required column.
+     *
+     * For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery
+     */
+    const queryJobConfig = {
+      query: `SELECT age FROM \`${projectId}.${datasetId}.${tableId}\``,
+      destinationTable: tableRef,
+      schemaUpdateOptions: ['ALLOW_FIELD_RELAXATION'],
+      writeDisposition: 'WRITE_APPEND',
+      useLegacySql: false,
+    };
+
+    // Configure the job.
+    const jobConfig = {
+      configuration: {
+        query: queryJobConfig,
+      },
+    };
+
+    // Start the query, passing in the extra configuration.
+    const response = await bigquery.createJob(jobConfig);
+    const job = response[0];
+
+    // Wait for job to complete.
+    await job.getQueryResults();
+
+    // Check the updated number of required fields.
+    const updatedTable = dataset.table(tableId);
+    const [updatedMetaData] = await updatedTable.getMetadata();
+
+    const updatedRequiredFields = updatedMetaData.schema.fields.filter(
+      ({mode}) => mode === 'REQUIRED'
+    ).length;
+
+    console.log(
+      `${updatedRequiredFields} fields in the schema are now required.`
+    );
+  }
+  // [END bigquery_relax_column_query_append]
+  relaxColumnQueryAppend();
+}
+main(...process.argv.slice(2));
diff --git a/samples/resources/partialdata.csv b/samples/resources/partialdata.csv
new file mode 100644
index 00000000..1fe7765b
--- /dev/null
+++ b/samples/resources/partialdata.csv
@@ -0,0 +1 @@
+2000,140.0,TRUE
\ No newline at end of file
diff --git a/samples/setUserAgent.js b/samples/setUserAgent.js
new file mode 100644
index 00000000..aa627f27
--- /dev/null
+++ b/samples/setUserAgent.js
@@ -0,0 +1,28 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+function main() {
+  // [START bigquery_set_user_agent]
+  // Import the Google Cloud client library
+  const {BigQuery} = require('@google-cloud/bigquery');
+  // Create a client and set the user agent
+  const bigquery = new BigQuery({userAgent: 'my-user-agent'});
+
+  console.log('User agent:');
+  console.log(bigquery.providedUserAgent);
+  // [END bigquery_set_user_agent]
+}
+main(...process.argv.slice(2));
diff --git a/samples/tables.js b/samples/tables.js
deleted file mode 100644
index 317423e2..00000000
--- a/samples/tables.js
+++ /dev/null
@@ -1,1079 +0,0 @@
-/**
- * Copyright 2017, Google, Inc.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -async function createTable(datasetId, tableId, schema, projectId) { - // [START bigquery_create_table] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_new_dataset"; - // const tableId = "my_new_table"; - // const schema = "Name:string, Age:integer, Weight:float, IsMagic:boolean"; - - // Creates a client - const bigquery = new BigQuery({projectId}); - - // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource - const options = {schema}; - - // Create a new table in the dataset - const [table] = await bigquery - .dataset(datasetId) - .createTable(tableId, options); - - console.log(`Table ${table.id} created.`); - // [END bigquery_create_table] -} - -async function deleteTable(datasetId, tableId, projectId) { - // [START bigquery_delete_table] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - // Creates a client - const bigquery = new BigQuery({projectId}); - - // Deletes the table - await bigquery - .dataset(datasetId) - .table(tableId) - .delete(); - - console.log(`Table ${tableId} deleted.`); - // [END bigquery_delete_table] -} - -async function listTables(datasetId, projectId) { - // [START bigquery_list_tables] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - - // Creates a client - const bigquery = new BigQuery({projectId}); - - // Lists all tables in the dataset - const [tables] = await bigquery.dataset(datasetId).getTables(); - - console.log('Tables:'); - tables.forEach(table => console.log(table.id)); - // [END bigquery_list_tables] -} - -async function browseRows(datasetId, tableId, projectId) { - // [START bigquery_browse_table] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. 
- */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - // Creates a client - const bigquery = new BigQuery({projectId}); - - // Lists rows in the table - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - - console.log('Rows:'); - rows.forEach(row => console.log(row)); - // [END bigquery_browse_table] -} - -async function copyTable( - srcDatasetId, - srcTableId, - destDatasetId, - destTableId, - projectId -) { - // [START bigquery_copy_table] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const srcDatasetId = "my_src_dataset"; - // const srcTableId = "my_src_table"; - // const destDatasetId = "my_dest_dataset"; - // const destTableId = "my_dest_table"; - - // Creates a client - const bigquery = new BigQuery({projectId}); - - // Copies the table contents into another table - const [job] = await bigquery - .dataset(srcDatasetId) - .table(srcTableId) - .copy(bigquery.dataset(destDatasetId).table(destTableId)); - - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_copy_table] -} - -async function loadLocalFile(datasetId, tableId, filename, projectId) { - // [START bigquery_load_from_file] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const filename = "/path/to/file.csv"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - // Creates a client - const bigquery = new BigQuery({projectId}); - - // Loads data from a local file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(filename); - - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_from_file] -} - -async function loadORCFromGCS(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_orc] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the ORC file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.orc'; - - // Instantiates clients - const bigquery = new BigQuery({projectId}); - const storage = new Storage({projectId}); - - // Configure the load job. 
For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'ORC', - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_orc] -} - -async function loadParquetFromGCS(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_parquet] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the Parquet file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.parquet - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.parquet'; - - // Instantiates clients - const bigquery = new BigQuery({projectId}); - const storage = new Storage({projectId}); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'PARQUET', - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_parquet] -} - -function loadCSVFromGCS(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_csv] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the CSV file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.csv'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. 
For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'CSV', - skipLeadingRows: 1, - schema: { - fields: [ - {name: 'name', type: 'STRING'}, - {name: 'post_abbr', type: 'STRING'}, - ], - }, - }; - - // Loads data from a Google Cloud Storage file into the table - bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); - // [END bigquery_load_table_gcs_csv] -} - -async function loadJSONFromGCS(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_json] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the json file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.json - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.json'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'NEWLINE_DELIMITED_JSON', - schema: { - fields: [ - {name: 'name', type: 'STRING'}, - {name: 'post_abbr', type: 'STRING'}, - ], - }, - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_json] -} - -async function loadCSVFromGCSAutodetect(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_csv_autodetect] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the CSV file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - * - * TODO(developer): Replace the following lines with the path to your file. 
- */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.csv'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'CSV', - skipLeadingRows: 1, - autodetect: true, - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_csv_autodetect] -} - -async function loadJSONFromGCSAutodetect(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_json_autodetect] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the JSON file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.json - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.json'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'NEWLINE_DELIMITED_JSON', - autodetect: true, - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_json_autodetect] -} - -async function loadCSVFromGCSTruncate(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_csv_truncate] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the CSV file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - * - * TODO(developer): Replace the following lines with the path to your file. 
- */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.csv'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'CSV', - skipLeadingRows: 1, - schema: { - fields: [ - {name: 'name', type: 'STRING'}, - {name: 'post_abbr', type: 'STRING'}, - ], - }, - // Set the write disposition to overwrite existing table data. - writeDisposition: 'WRITE_TRUNCATE', - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_csv_truncate] -} - -async function loadJSONFromGCSTruncate(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_json_truncate] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the JSON file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.json - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.json'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'NEWLINE_DELIMITED_JSON', - schema: { - fields: [ - {name: 'name', type: 'STRING'}, - {name: 'post_abbr', type: 'STRING'}, - ], - }, - // Set the write disposition to overwrite existing table data. - writeDisposition: 'WRITE_TRUNCATE', - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_json_truncate] -} - -async function loadParquetFromGCSTruncate(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_parquet_truncate] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. 
- */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the CSV file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.parquet'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'PARQUET', - // Set the write disposition to overwrite existing table data. - writeDisposition: 'WRITE_TRUNCATE', - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_parquet_truncate] -} - -async function loadOrcFromGCSTruncate(datasetId, tableId, projectId) { - // [START bigquery_load_table_gcs_orc_truncate] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - - /** - * This sample loads the CSV file at - * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - * - * TODO(developer): Replace the following lines with the path to your file. - */ - const bucketName = 'cloud-samples-data'; - const filename = 'bigquery/us-states/us-states.orc'; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Configure the load job. For full list of options, see: - // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load - const metadata = { - sourceFormat: 'ORC', - // Set the write disposition to overwrite existing table data. - writeDisposition: 'WRITE_TRUNCATE', - }; - - // Loads data from a Google Cloud Storage file into the table - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_load_table_gcs_orc_truncate] -} - -async function extractTableToGCS( - datasetId, - tableId, - bucketName, - filename, - projectId -) { - // [START bigquery_extract_table] - // Imports the Google Cloud client libraries - const {BigQuery} = require('@google-cloud/bigquery'); - const {Storage} = require('@google-cloud/storage'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. 
- */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - // const bucketName = "my-bucket"; - // const filename = "file.csv"; - - // Instantiates clients - const bigquery = new BigQuery({ - projectId: projectId, - }); - - const storage = new Storage({ - projectId: projectId, - }); - - // Exports data from the table into a Google Cloud Storage file - const [job] = await bigquery - .dataset(datasetId) - .table(tableId) - .extract(storage.bucket(bucketName).file(filename)); - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); - - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - // [END bigquery_extract_table] -} - -async function insertRowsAsStream(datasetId, tableId, rows, projectId) { - // [START bigquery_table_insert_rows] - // Imports the Google Cloud client library - const {BigQuery} = require('@google-cloud/bigquery'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = "your-project-id"; - // const datasetId = "my_dataset"; - // const tableId = "my_table"; - // const rows = [{name: "Tom", age: 30}, {name: "Jane", age: 32}]; - - // Creates a client - const bigquery = new BigQuery({ - projectId: projectId, - }); - - // Inserts data into a table - await bigquery - .dataset(datasetId) - .table(tableId) - .insert(rows); - console.log(`Inserted ${rows.length} rows`); - // [END bigquery_table_insert_rows] -} - -async function main() { - const fs = require(`fs`); - - require(`yargs`) - .demand(1) - .command( - `create `, - `Creates a new table.`, - {}, - opts => { - createTable(opts.datasetId, opts.tableId, opts.schema, opts.projectId); - } - ) - .command( - `list `, - `Lists all tables in a dataset.`, - {}, - opts => { - listTables(opts.datasetId, opts.projectId); - } - ) - .command( - `delete `, - `Deletes a table.`, - {}, - opts => { - deleteTable(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `copy `, - `Makes a copy of a table.`, - {}, - opts => { - copyTable( - opts.srcDatasetId, - opts.srcTableId, - opts.destDatasetId, - opts.destTableId, - opts.projectId - ); - } - ) - .command( - `browse `, - `Lists rows in a table.`, - {}, - opts => { - browseRows(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-local-csv `, - `Loads data from a local file into a table.`, - {}, - opts => { - loadLocalFile( - opts.datasetId, - opts.tableId, - opts.fileName, - opts.projectId - ); - } - ) - .command( - `load-gcs-orc `, - `Loads sample ORC data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadORCFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-parquet `, - `Loads sample Parquet data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadParquetFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-csv `, - `Loads sample CSV data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadCSVFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-json `, - `Loads sample JSON data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadJSONFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-csv-autodetect `, - `Loads sample CSV data from a Google Cloud Storage file into a table.`, - {}, - opts => { - 
loadCSVFromGCSAutodetect(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-json-autodetect `, - `Loads sample JSON data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadJSONFromGCSAutodetect(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-csv-truncate `, - `Loads sample CSV data from GCS, replacing an existing table.`, - {}, - opts => { - loadCSVFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-json-truncate `, - `Loads sample JSON data from GCS, replacing an existing table.`, - {}, - opts => { - loadJSONFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-parquet-truncate `, - `Loads sample Parquet data from GCS, replacing an existing table.`, - {}, - opts => { - loadParquetFromGCSTruncate( - opts.datasetId, - opts.tableId, - opts.projectId - ); - } - ) - .command( - `load-gcs-orc-truncate `, - `Loads sample Orc data from GCS, replacing an existing table.`, - {}, - opts => { - loadOrcFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `extract `, - `Extract a table from BigQuery to Google Cloud Storage.`, - {}, - opts => { - extractTableToGCS( - opts.datasetId, - opts.tableId, - opts.bucketName, - opts.fileName, - opts.projectId - ); - } - ) - .command( - `insert `, - `Insert a JSON array (as a string or newline-delimited file) into a BigQuery table.`, - {}, - opts => { - let content; - try { - content = fs.readFileSync(opts.json_or_file); - } catch (err) { - content = opts.json_or_file; - } - - let rows = null; - try { - rows = JSON.parse(content); - } catch (err) { - throw new Error( - `"json_or_file" (or the file it points to) is not a valid JSON array.` - ); - } - - if (!Array.isArray(rows)) { - throw new Error( - `"json_or_file" (or the file it points to) is not a valid JSON array.` - ); - } - - insertRowsAsStream( - opts.datasetId, - opts.tableId, - rows, - opts.projectId || process.env.GCLOUD_PROJECT - ); - } - ) - .example( - `node $0 create my-project-id my_dataset my_table "Name:string, Age:integer, Weight:float, IsMagic:boolean"`, - `Creates a new table named "my_table" in "my_dataset".` - ) - .example( - `node $0 list my-project-id my_dataset`, - `Lists tables in "my_dataset".` - ) - .example( - `node $0 browse my-project-id my_dataset my_table`, - `Displays rows from "my_table" in "my_dataset".` - ) - .example( - `node $0 delete my-project-id my_dataset my_table`, - `Deletes "my_table" from "my_dataset".` - ) - .example( - `node $0 load my-project-id my_dataset my_table ./data.csv`, - `Imports a local file into a table.` - ) - .example( - `node $0 load-gcs my-project-id my_dataset my_table my-bucket data.csv`, - `Imports a GCS file into a table.` - ) - .example( - `node $0 extract my-project-id my_dataset my_table my-bucket my-file`, - `Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV.` - ) - .example( - `node $0 extract my-project-id my_dataset my_table my-bucket my-file -f JSON --gzip`, - `Exports my_dataset:my_table to gcs://my-bucket/my-file as gzipped JSON.` - ) - .example( - `node $0 insert my-project-id my_dataset my_table json_string`, - `Inserts the JSON array represented by json_string into my_dataset:my_table.` - ) - .example( - `node $0 insert my-project-id my_dataset my_table json_file`, - `Inserts the JSON objects contained in json_file (one per line) into my_dataset:my_table.` - ) - .example( - `node $0 copy my-project-id src_dataset src_table 
dest_dataset dest_table`, - `Copies src_dataset:src_table to dest_dataset:dest_table.` - ) - .wrap(120) - .recommendCommands() - .epilogue( - `For more information, see https://cloud.google.com/bigquery/docs` - ) - .help() - .strict().argv; -} - -main().catch(console.error); diff --git a/samples/test/.eslintrc.yml b/samples/test/.eslintrc.yml deleted file mode 100644 index 6db2a46c..00000000 --- a/samples/test/.eslintrc.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -env: - mocha: true diff --git a/samples/test/authViewTutorial.test.js b/samples/test/authViewTutorial.test.js new file mode 100644 index 00000000..defd973c --- /dev/null +++ b/samples/test/authViewTutorial.test.js @@ -0,0 +1,75 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {assert} = require('chai'); +const {describe, it, before, after} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const {BigQuery} = require('@google-cloud/bigquery'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_authView'; +const generateUuid = () => + `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); +let projectId; +const datasetId = generateUuid(); +const tableId = generateUuid(); +const sourceDatasetId = generateUuid(); +const sourceTableId = generateUuid(); +const sharedDatasetId = generateUuid(); +const sharedViewId = generateUuid(); + +const bigquery = new BigQuery(); + +describe('Authorized View Tutorial', () => { + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + await bigquery + .dataset(sourceDatasetId) + .delete({force: true}) + .catch(console.warn); + await bigquery + .dataset(sharedDatasetId) + .delete({force: true}) + .catch(console.warn); + }); + + before(async () => { + await bigquery.createDataset(datasetId); + const [tableData] = await bigquery.dataset(datasetId).createTable(tableId); + projectId = tableData.metadata.tableReference.projectId; + }); + + it('should create an authorized view', async () => { + const output = execSync( + `node authViewTutorial.js ${projectId} ${sourceDatasetId} ${sourceTableId} ${sharedDatasetId} ${sharedViewId}` + ); + assert.include( + output, + `View ${projectId}:${sharedDatasetId}.${sharedViewId} created.` + ); + const [exists] = await bigquery + .dataset(sharedDatasetId) + .table(sharedViewId) + .exists(); + assert.ok(exists); + }); +}); diff --git a/samples/test/clients.test.js b/samples/test/clients.test.js new file mode 100644 index 00000000..a81e5a7e --- /dev/null +++ b/samples/test/clients.test.js @@ -0,0 +1,29 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+const cp = require('child_process');
+
+const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
+
+describe('Client', () => {
+  it('should set providedUserAgent', async () => {
+    const output = execSync('node setUserAgent.js');
+    assert.match(output, /User agent:/);
+    assert.match(output, /my-user-agent/);
+  });
+});
diff --git a/samples/test/datasets.test.js b/samples/test/datasets.test.js
index 3447efb9..93cf2011 100644
--- a/samples/test/datasets.test.js
+++ b/samples/test/datasets.test.js
@@ -1,54 +1,144 @@
-/**
- * Copyright 2017, Google, Inc.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+// Copyright 2017 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
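+
+// These tests exercise the dataset samples (createDataset.js, listDatasets.js,
+// getDataset.js, the update/label scripts, and deleteDataset.js) by running
+// them with execSync and asserting on their console output.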
'use strict'; const {BigQuery} = require('@google-cloud/bigquery'); const {assert} = require('chai'); -const execa = require('execa'); +const {describe, it, after, before} = require('mocha'); +const cp = require('child_process'); const uuid = require('uuid'); -const exec = async cmd => (await execa.shell(cmd)).stdout; -const DATASET_ID = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); +const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests'; +const datasetId = `${GCLOUD_TESTS_PREFIX}_datasets_${uuid.v4()}`.replace( + /-/gi, + '_' +); + const bigquery = new BigQuery(); -describe(`Datasets`, () => { +describe('Datasets', () => { + before(async () => { + // Delete any stale datasets from samples tests + await deleteDatasets(); + }); + after(async () => { await bigquery - .dataset(DATASET_ID) + .dataset(datasetId) .delete({force: true}) .catch(console.warn); }); - it(`should create a dataset`, async () => { - const output = await exec(`node createDataset.js ${DATASET_ID}`); - assert.strictEqual(output, `Dataset ${DATASET_ID} created.`); - const [exists] = await bigquery.dataset(DATASET_ID).exists(); + it('should create a dataset', async () => { + const output = execSync(`node createDataset.js ${datasetId}`); + assert.include(output, `Dataset ${datasetId} created.`); + const [exists] = await bigquery.dataset(datasetId).exists(); assert.ok(exists); }); - it(`should list datasets`, async () => { - const output = await exec(`node listDatasets.js`); + it('should list datasets', async () => { + const output = execSync('node listDatasets.js'); + assert.match(output, /Datasets:/); + assert.match(output, new RegExp(datasetId)); + }); + + it('should retrieve a dataset if it exists', async () => { + const output = execSync(`node getDataset.js ${datasetId}`); + assert.include(output, 'Dataset:'); + assert.include(output, datasetId); + }); + + it("should update dataset's description", async () => { + const output = execSync(`node updateDatasetDescription.js ${datasetId}`); + assert.include( + output, + `${datasetId} description: New dataset description.` + ); + }); + + it("should update dataset's expiration", async () => { + const output = execSync(`node updateDatasetExpiration.js ${datasetId}`); + assert.include(output, `${datasetId} expiration: 86400000`); + }); + + it('should add label to a dataset', async () => { + const output = execSync(`node labelDataset.js ${datasetId}`); + assert.include(output, `${datasetId} labels:`); + assert.include(output, "{ color: 'green' }"); + }); + + it("should list a dataset's labels", async () => { + const output = execSync(`node getDatasetLabels.js ${datasetId}`); + assert.include(output, `${datasetId} Labels:`); + assert.include(output, 'color: green'); + }); + + it('should delete a label from a dataset', async () => { + const output = execSync(`node deleteLabelDataset.js ${datasetId}`); + assert.include(output, `${datasetId} labels:`); + assert.include(output, 'undefined'); + }); + + it("should update dataset's access", async () => { + const output = execSync(`node updateDatasetAccess.js ${datasetId}`); + assert.include(output, "role: 'READER'"); + assert.include(output, "userByEmail: 'sample.bigquery.dev@gmail.com'"); + }); + + it('should filter datasets by label', async () => { + execSync(`node labelDataset.js ${datasetId}`); + const output = execSync('node listDatasetsByLabel.js'); assert.match(output, /Datasets:/); - assert.match(output, new RegExp(DATASET_ID)); + assert.match(output, new 
RegExp(datasetId)); }); - it(`should delete a dataset`, async () => { - const output = await exec(`node deleteDataset.js ${DATASET_ID}`); - assert.strictEqual(output, `Dataset ${DATASET_ID} deleted.`); - const [exists] = await bigquery.dataset(DATASET_ID).exists(); + it('should delete a dataset', async () => { + const output = execSync(`node deleteDataset.js ${datasetId}`); + assert.include(output, `Dataset ${datasetId} deleted.`); + const [exists] = await bigquery.dataset(datasetId).exists(); assert.strictEqual(exists, false); }); + + // Only delete a resource if it is older than 24 hours. That will prevent + // collisions with parallel CI test runs. + function isResourceStale(creationTime) { + const oneDayMs = 86400000; + const now = new Date(); + const created = new Date(creationTime); + return now.getTime() - created.getTime() >= oneDayMs; + } + + async function deleteDatasets() { + let [datasets] = await bigquery.getDatasets(); + datasets = datasets.filter(dataset => + dataset.id.includes(GCLOUD_TESTS_PREFIX) + ); + + for (const dataset of datasets) { + const [metadata] = await dataset.getMetadata(); + const creationTime = Number(metadata.creationTime); + + if (isResourceStale(creationTime)) { + try { + await dataset.delete({force: true}); + } catch (e) { + console.log(`dataset(${dataset.id}).delete() failed`); + console.log(e); + } + } + } + } }); diff --git a/samples/test/jobs.test.js b/samples/test/jobs.test.js new file mode 100644 index 00000000..068117d9 --- /dev/null +++ b/samples/test/jobs.test.js @@ -0,0 +1,68 @@ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
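+
+// These tests run the job samples (listJobs.js, getJob.js, cancelJob.js,
+// createJob.js, queryPagination.js) with execSync and assert on their console
+// output; before() creates a query job against a public dataset so its jobId
+// can be reused by the list/get/cancel cases below.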
+ +'use strict'; + +const {BigQuery} = require('@google-cloud/bigquery'); +const {assert} = require('chai'); +const {describe, it, before} = require('mocha'); +const cp = require('child_process'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const bigquery = new BigQuery(); +let jobId; + +describe('Jobs', () => { + before(async () => { + const query = `SELECT name + FROM \`bigquery-public-data.usa_names.usa_1910_2013\` + WHERE state = 'TX' + LIMIT 100`; + + const queryOptions = { + query: query, + }; + + const [job] = await bigquery.createQueryJob(queryOptions); + jobId = job.metadata.jobReference.jobId; + }); + + it('should list jobs', async () => { + const output = execSync('node listJobs.js'); + assert.match(output, /Jobs:/); + assert.include(output, jobId); + }); + + it('should retrieve a job', async () => { + const output = execSync(`node getJob.js ${jobId}`); + assert.include(output, `jobId: '${jobId}'`); + }); + + it('should attempt to cancel a job', async () => { + const output = execSync(`node cancelJob.js ${jobId}`); + assert.include(output, 'state:'); + }); + + it('should create a job', async () => { + const output = execSync('node createJob.js'); + assert.include(output, 'Rows:'); + }); + + it('should auto-paginate through query result rows', async () => { + const output = execSync('node queryPagination.js'); + assert.match(output, /name/); + assert.match(output, /total people/); + }); +}); diff --git a/samples/test/models.test.js b/samples/test/models.test.js new file mode 100644 index 00000000..17fe1750 --- /dev/null +++ b/samples/test/models.test.js @@ -0,0 +1,138 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
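+
+// Note: before() trains a BigQuery ML logistic regression model with a
+// CREATE OR REPLACE MODEL query over the public Google Analytics sample
+// dataset, so this suite can take several minutes to run.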
+ +'use strict'; + +const {BigQuery} = require('@google-cloud/bigquery'); +const {assert} = require('chai'); +const {describe, it, before, after} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_models'; + +const bigquery = new BigQuery(); + +describe('Models', () => { + const datasetId = `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + const modelId = `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + + before(async () => { + const query = `CREATE OR REPLACE MODEL \`${datasetId}.${modelId}\` + OPTIONS(model_type='logistic_reg') AS + SELECT + IF(totals.transactions IS NULL, 0, 1) AS label, + IFNULL(device.operatingSystem, "") AS os, + device.isMobile AS is_mobile, + IFNULL(geoNetwork.country, "") AS country, + IFNULL(totals.pageviews, 0) AS pageviews + FROM + \`bigquery-public-data.google_analytics_sample.ga_sessions_*\` + WHERE + _TABLE_SUFFIX BETWEEN '20160801' AND '20170631' + LIMIT 100000;`; + + const datasetOptions = { + location: 'US', + }; + + const queryOptions = { + query: query, + }; + + await bigquery.createDataset(datasetId, datasetOptions); + + // Run query to create a model + const [job] = await bigquery.createQueryJob(queryOptions); + + // Wait for the query to finish + await job.getQueryResults(); + }); + + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + }); + + it('should retrieve a model if it exists', async () => { + const output = execSync(`node getModel.js ${datasetId} ${modelId}`); + assert.include(output, 'Model:'); + assert.include(output, datasetId && modelId); + }); + + it('should list models', async () => { + const output = execSync(`node listModels.js ${datasetId}`); + assert.include(output, 'Models:'); + assert.include(output, datasetId); + }); + + it('should list models streaming', async () => { + const output = execSync(`node getModel.js ${datasetId} ${modelId}`); + assert.include(output, modelId); + }); + + it("should update model's metadata", async () => { + const output = execSync(`node updateModel.js ${datasetId} ${modelId}`); + assert.include(output, `${modelId} description: A really great model.`); + }); +}); + +describe('Create/Delete Model', () => { + const datasetId = `${GCLOUD_TESTS_PREFIX}_delete_${uuid.v4()}`.replace( + /-/gi, + '_' + ); + const modelId = `${GCLOUD_TESTS_PREFIX}_delete_${uuid.v4()}`.replace( + /-/gi, + '_' + ); + + before(async () => { + const datasetOptions = { + location: 'US', + }; + await bigquery.createDataset(datasetId, datasetOptions); + }); + + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + }); + + it('should create a model', async () => { + const output = execSync(`node createModel.js ${datasetId} ${modelId}`); + assert.include(output, `Model ${modelId} created.`); + const [exists] = await bigquery + .dataset(datasetId) + .model(modelId) + .exists(); + assert.strictEqual(exists, true); + }); + + it('should delete a model', async () => { + const output = execSync(`node deleteModel.js ${datasetId} ${modelId}`); + assert.include(output, `Model ${modelId} deleted.`); + const [exists] = await bigquery + .dataset(datasetId) + .model(modelId) + .exists(); + assert.strictEqual(exists, false); + }); +}); diff --git a/samples/test/queries.test.js b/samples/test/queries.test.js index 541e13b0..484fee22 100644 --- 
a/samples/test/queries.test.js +++ b/samples/test/queries.test.js @@ -1,46 +1,199 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 'use strict'; const {assert} = require('chai'); -const execa = require('execa'); - -const cmd = `node queries.js`; -const exec = async cmd => { - const res = await execa.shell(cmd); - assert.isEmpty(res.stderr); - return res.stdout; -}; - -describe(`Queries`, () => { - it(`should query stackoverflow`, async () => { - const output = await exec(`${cmd} stackoverflow`); +const {describe, it, before, after} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const {BigQuery} = require('@google-cloud/bigquery'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_queries'; + +const generateUuid = () => + `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + +const datasetId = generateUuid(); +const tableId = generateUuid(); +const destTableId = generateUuid(); +const routineId = generateUuid(); +let projectId; + +const bigquery = new BigQuery(); + +describe('Queries', () => { + before(async () => { + const schema = [{name: 'age', type: 'STRING', mode: 'REQUIRED'}]; + const options = { + schema: schema, + }; + await bigquery.createDataset(datasetId); + await bigquery.dataset(datasetId).createTable(destTableId); + const [tableData] = await bigquery + .dataset(datasetId) + .createTable(tableId, options); + projectId = tableData.metadata.tableReference.projectId; + }); + + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + }); + + it('should query stackoverflow', async () => { + const output = execSync('node queryStackOverflow.js'); assert.match(output, /Query Results:/); assert.match(output, /views/); }); - it(`should run a query`, async () => { - const output = await exec(`${cmd} query`); + it('should run a query', async () => { + const output = execSync('node query.js'); assert.match(output, /Rows:/); assert.match(output, /name/); }); - it(`should run a query with the cache disabled`, async () => { - const output = await exec(`${cmd} disable-cache`); + it('should run a query as a dry run', async () => { + const output = execSync('node queryDryRun.js'); + assert.match(output, /Status:/); + assert.include(output, '\nJob Statistics:'); + assert.include(output, 
'DONE'); + assert.include(output, 'totalBytesProcessed:'); + }); + + it('should run a query with the cache disabled', async () => { + const output = execSync('node queryDisableCache.js'); assert.match(output, /Rows:/); assert.match(output, /corpus/); }); + + it('should run a query with named params', async () => { + const output = execSync('node queryParamsNamed.js'); + assert.match(output, /Rows:/); + assert.match(output, /word_count/); + }); + + it('should run a query with named params and provided types', async () => { + const output = execSync('node queryParamsNamedTypes.js'); + assert.match(output, /Rows:/); + assert.match(output, /word/); + }); + + it('should run a query with positional params', async () => { + const output = execSync('node queryParamsPositional.js'); + assert.match(output, /Rows:/); + assert.match(output, /word_count/); + }); + + it('should run a query with positional params and provided types', async () => { + const output = execSync('node queryParamsPositionalTypes.js'); + assert.match(output, /Rows:/); + assert.match(output, /word/); + }); + + it('should run a query with struct params', async () => { + const output = execSync('node queryParamsStructs.js'); + assert.match(output, /Rows:/); + assert.match(output, /foo/); + }); + + it('should run a query with array params', async () => { + const output = execSync('node queryParamsArrays.js'); + assert.match(output, /Rows:/); + assert.match(output, /count/); + }); + + it('should run a query with timestamp params', async () => { + const output = execSync('node queryParamsTimestamps.js'); + assert.match(output, /Rows:/); + assert.match(output, /BigQueryTimestamp/); + }); + + it('should run a query with a destination table', async () => { + const output = execSync( + `node queryDestinationTable.js ${datasetId} ${tableId}` + ); + assert.include(output, `Query results loaded to table ${tableId}`); + }); + + it('should run a query with legacy SQL', async () => { + const output = execSync('node queryLegacy.js'); + assert.match(output, /Rows:/); + assert.match(output, /word/); + }); + + it('should run a query with legacy SQL and large results', async () => { + const destTableId = generateUuid(); + const output = execSync( + `node queryLegacyLargeResults.js ${datasetId} ${destTableId} ${projectId}` + ); + assert.match(output, /Rows:/); + assert.match(output, /word/); + }); + + it('should add a new column via a query job', async () => { + const destTableId = generateUuid(); + execSync(`node createTable.js ${datasetId} ${destTableId} 'name:STRING'`); + const output = execSync( + `node addColumnQueryAppend.js ${datasetId} ${destTableId}` + ); + assert.match(output, /completed\./); + const [rows] = await bigquery + .dataset(datasetId) + .table(destTableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it('should relax columns via a query job', async () => { + const output = execSync( + `node relaxColumnQueryAppend.js ${projectId} ${datasetId} ${tableId}` + ); + + assert.match(output, /1 fields in the schema are required\./); + assert.match(output, /0 fields in the schema are now required\./); + }); + + it('should run a query at batch priority', async () => { + const output = execSync('node queryBatch.js'); + assert.match(output, /Job/); + assert.match(output, /is currently in state/); + }); + + it('should create a view via DDL query', async () => { + const output = execSync(`node ddlCreateView.js ${projectId} ${datasetId}`); + assert.match(output, /Created new view/); + }); + + it('should query an external data source', 
async () => { + const permTableId = generateUuid(); + const output = execSync( + `node queryExternalGCSPerm.js ${datasetId} ${permTableId}` + ); + assert.match(output, /Rows:/); + assert.match(output, /post_abbr/); + }); + + it('should create a routine using DDL', async () => { + const output = execSync( + `node createRoutineDDL.js ${projectId} ${datasetId} ${routineId}` + ); + assert.include(output, `Routine ${routineId} created.`); + }); }); diff --git a/samples/test/quickstart.test.js b/samples/test/quickstart.test.js index 51528ff8..4428a822 100644 --- a/samples/test/quickstart.test.js +++ b/samples/test/quickstart.test.js @@ -1,41 +1,41 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 'use strict'; const {assert} = require('chai'); +const {describe, it, after} = require('mocha'); const uuid = require('uuid'); -const execa = require('execa'); +const cp = require('child_process'); const {BigQuery} = require('@google-cloud/bigquery'); +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + const bigquery = new BigQuery(); -const exec = async cmd => { - const res = await execa.shell(cmd); - assert.isEmpty(res.stderr); - return res.stdout; -}; describe('Quickstart', () => { - const datasetName = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + const datasetName = `nodejs_samples_tests_quickstart_${uuid.v4()}`.replace( + /-/gi, + '_' + ); after(async () => { await bigquery.dataset(datasetName).delete({force: true}); }); it('quickstart should create a dataset', async () => { - const output = await exec(`node quickstart ${datasetName}`); - assert.strictEqual(output, `Dataset ${datasetName} created.`); + const output = execSync(`node quickstart ${datasetName}`); + assert.include(output, `Dataset ${datasetName} created.`); }); }); diff --git a/samples/test/routines.test.js b/samples/test/routines.test.js new file mode 100644 index 00000000..ec40b01b --- /dev/null +++ b/samples/test/routines.test.js @@ -0,0 +1,136 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {assert} = require('chai'); +const {describe, it, before, after} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const {BigQuery} = require('@google-cloud/bigquery'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_routines'; + +const generateUuid = () => + `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + +const datasetId = generateUuid(); +const routineId = generateUuid(); +const newRoutineId = generateUuid(); + +const bigquery = new BigQuery(); + +describe('Routines', () => { + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + }); + + before(async () => { + await bigquery.createDataset(datasetId); + const dataset = bigquery.dataset(datasetId); + const routine = dataset.routine(routineId); + const config = { + arguments: [ + { + name: 'x', + dataType: { + typeKind: 'INT64', + }, + }, + ], + definitionBody: 'x * 3', + routineType: 'SCALAR_FUNCTION', + returnType: { + typeKind: 'INT64', + }, + }; + await routine.create(config); + }); + + it('should create a routine', async () => { + const output = execSync( + `node createRoutine.js ${datasetId} ${newRoutineId}` + ); + assert.include(output, `Routine ${newRoutineId} created.`); + }); + + it('should get a routine', async () => { + const output = execSync(`node getRoutine.js ${datasetId} ${routineId}`); + assert.include(output, `Routine ${routineId} retrieved.`); + }); + + it('should list routines', async () => { + const output = execSync(`node listRoutines.js ${datasetId}`); + assert.match(output, /Routines:/); + assert.include(output, routineId); + }); + + it('should update routine', async () => { + const output = execSync(`node updateRoutine.js ${datasetId} ${routineId}`); + assert.include(output, 'Routine description: New description'); + }); + + describe('Delete Routine', () => { + const datasetId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + const routineId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + + before(async () => { + await bigquery.createDataset(datasetId); + const dataset = bigquery.dataset(datasetId); + // Creates a new routine in the dataset + const routine = dataset.routine(routineId); + const config = { + arguments: [ + { + name: 'x', + dataType: { + typeKind: 'INT64', + }, + }, + ], + definitionBody: 'x * 3', + routineType: 'SCALAR_FUNCTION', + returnType: { + typeKind: 'INT64', + }, + }; + await routine.create(config); + }); + + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + }); + + it('should delete a routine', async () => { + const output = execSync( + `node deleteRoutine.js ${datasetId} ${routineId}` + ); + assert.include(output, `Routine ${routineId} deleted.`); + const [exists] = await bigquery + .dataset(datasetId) + .routine(routineId) + .exists(); + assert.strictEqual(exists, false); + }); + }); +}); diff --git a/samples/test/tables.test.js b/samples/test/tables.test.js index 12e9d11d..d6078bf5 100644 --- 
a/samples/test/tables.test.js +++ b/samples/test/tables.test.js @@ -1,58 +1,59 @@ -/** - * Copyright 2017, Google, Inc. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 'use strict'; const {assert} = require('chai'); +const {describe, it, before, after, beforeEach} = require('mocha'); const path = require('path'); const uuid = require('uuid'); -const execa = require('execa'); +const cp = require('child_process'); const {Storage} = require('@google-cloud/storage'); const {BigQuery} = require('@google-cloud/bigquery'); +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests'; + const storage = new Storage(); -const exec = async cmd => { - const res = await execa.shell(cmd); - assert.isEmpty(res.stderr); - return res.stdout; -}; -const cmd = `node tables.js`; -const generateUuid = () => `gcloud-tests-${uuid.v4()}`.replace(/-/gi, '_'); + +const generateUuid = () => + `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); const datasetId = generateUuid(); const srcDatasetId = datasetId; const destDatasetId = generateUuid(); const tableId = generateUuid(); +const nestedTableId = generateUuid(); +const partitionedTableId = generateUuid(); const srcTableId = tableId; const destTableId = generateUuid(); -const schema = `Name:string, Age:integer, Weight:float, IsMagic:boolean`; +const viewId = generateUuid(); const bucketName = generateUuid(); -const exportFileName = `data.json`; -const importFileName = `data.avro`; +const exportCSVFileName = 'data.json'; +const exportJSONFileName = 'data.json'; +const importFileName = 'data.avro'; +const partialDataFileName = 'partialdata.csv'; const localFilePath = path.join(__dirname, `../resources/${importFileName}`); -const rows = [ - {Name: `foo`, Age: 27, Weight: 80.3, IsMagic: true}, - {Name: `bar`, Age: 13, Weight: 54.6, IsMagic: false}, -]; - +const partialDataFilePath = path.join( + __dirname, + `../resources/${partialDataFileName}` +); const bigquery = new BigQuery(); describe('Tables', () => { - let projectId; before(async () => { - projectId = await bigquery.getProjectId(); const [bucket] = await storage.createBucket(bucketName); await Promise.all([ bucket.upload(localFilePath), @@ -61,6 +62,9 @@ describe('Tables', () => { ]); }); + // to avoid getting rate limited + beforeEach(done => setTimeout(done, 500)); + after(async () => { await bigquery .dataset(srcDatasetId) @@ -70,6 +74,10 @@ 
describe('Tables', () => { .dataset(destDatasetId) .delete({force: true}) .catch(console.warn); + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); await storage .bucket(bucketName) .deleteFiles({force: true}) @@ -88,11 +96,9 @@ describe('Tables', () => { .catch(console.warn); }); - it(`should create a table`, async () => { - const output = await exec( - `${cmd} create ${projectId} ${datasetId} ${tableId} "${schema}"` - ); - assert.strictEqual(output, `Table ${tableId} created.`); + it('should create a table', async () => { + const output = execSync(`node createTable.js ${datasetId} ${tableId}`); + assert.include(output, `Table ${tableId} created.`); const [exists] = await bigquery .dataset(datasetId) .table(tableId) @@ -100,15 +106,100 @@ describe('Tables', () => { assert.ok(exists); }); - it(`should list tables`, async () => { - const output = await exec(`${cmd} list ${projectId} ${datasetId}`); + it('should create a partitioned table', async () => { + const output = execSync( + `node createTablePartitioned.js ${datasetId} ${partitionedTableId}` + ); + assert.include( + output, + `Table ${partitionedTableId} created with partitioning:` + ); + assert.include(output, "type: 'DAY'"); + assert.include(output, "field: 'date'"); + const [exists] = await bigquery + .dataset(datasetId) + .table(partitionedTableId) + .exists(); + assert.ok(exists); + }); + + it('should create an integer range partitioned table', async () => { + const rangePartTableId = generateUuid(); + const output = execSync( + `node createTableRangePartitioned.js ${datasetId} ${rangePartTableId}` + ); + assert.include( + output, + `Table ${rangePartTableId} created with integer range partitioning:` + ); + assert.include( + output, + "range: { start: '0', end: '100000', interval: '10' }" + ); + const [exists] = await bigquery + .dataset(datasetId) + .table(rangePartTableId) + .exists(); + assert.ok(exists); + }); + + it('should create a table with nested schema', async () => { + const output = execSync( + `node nestedRepeatedSchema.js ${datasetId} ${nestedTableId}` + ); + assert.include(output, `Table ${nestedTableId} created.`); + const [exists] = await bigquery + .dataset(datasetId) + .table(nestedTableId) + .exists(); + assert.ok(exists); + }); + + it('should retrieve a table if it exists', async () => { + const output = execSync(`node getTable.js ${datasetId} ${tableId}`); + assert.include(output, 'Table:'); + assert.include(output, datasetId); + assert.include(output, tableId); + }); + + it('should list tables', async () => { + const output = execSync(`node listTables.js ${datasetId}`); assert.match(output, /Tables:/); assert.match(output, new RegExp(tableId)); }); - it(`should load a local CSV file`, async () => { - const output = await exec( - `${cmd} load-local-csv ${projectId} ${datasetId} ${tableId} ${localFilePath}` + it("should update table's description", async () => { + const output = execSync( + `node updateTableDescription.js ${datasetId} ${tableId}` + ); + assert.include(output, `${tableId} description: New table description.`); + }); + + it("should update table's expiration", async () => { + const currentTime = Date.now(); + const expirationTime = currentTime + 1000 * 60 * 60 * 24 * 5; + const output = execSync( + `node updateTableExpiration.js ${datasetId} ${tableId} ${expirationTime}` + ); + assert.include(output, `${tableId}`); + assert.include(output, `expiration: ${expirationTime}`); + }); + + it('should add label to a table', async () => { + const output = 
execSync(`node labelTable.js ${datasetId} ${tableId}`); + assert.include(output, `${tableId} labels:`); + assert.include(output, "{ color: 'green' }"); + }); + + it('should delete a label from a table', async () => { + const output = execSync(`node deleteLabelTable.js ${datasetId} ${tableId}`); + assert.include(output, `${tableId} labels:`); + assert.include(output, 'undefined'); + }); + + it('should load a local CSV file', async () => { + const output = execSync( + `node loadLocalFile.js ${datasetId} ${tableId} ${localFilePath}` ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -118,34 +209,57 @@ describe('Tables', () => { assert.strictEqual(rows.length, 1); }); - it(`should browse table rows`, async () => { - const output = await exec( - `${cmd} browse ${projectId} ${datasetId} ${tableId}` + it('should browse table rows', async () => { + const browseDestTable = generateUuid(); + const output = execSync( + `node browseTable.js ${datasetId} ${browseDestTable}` ); - assert.strictEqual( - output, - `Rows:\n{ Name: 'Gandalf', Age: 2000, Weight: 140, IsMagic: true }` + assert.match(output, /name/); + assert.match(output, /total people/); + }); + + it('should extract a table to GCS CSV file', async () => { + const output = execSync( + `node extractTableToGCS.js ${datasetId} ${tableId} ${bucketName} ${exportCSVFileName}` ); + + assert.match(output, /created\./); + const [exists] = await storage + .bucket(bucketName) + .file(exportCSVFileName) + .exists(); + assert.ok(exists); }); - it(`should extract a table to GCS`, async () => { - const output = await exec( - `${cmd} extract ${projectId} ${datasetId} ${tableId} ${bucketName} ${exportFileName}` + it('should extract a table to GCS JSON file', async () => { + const output = execSync( + `node extractTableJSON.js ${datasetId} ${tableId} ${bucketName} ${exportJSONFileName}` ); - assert.match(output, /completed\./); + assert.match(output, /created\./); const [exists] = await storage .bucket(bucketName) - .file(exportFileName) + .file(exportJSONFileName) .exists(); assert.ok(exists); }); - it(`should load a GCS ORC file`, async () => { - const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-orc ${projectId} ${datasetId} ${tableId}` + it('should extract a table to GCS compressed file', async () => { + const output = execSync( + `node extractTableCompressed.js ${datasetId} ${tableId} ${bucketName} ${exportCSVFileName}` ); + + assert.match(output, /created\./); + const [exists] = await storage + .bucket(bucketName) + .file(exportCSVFileName) + .exists(); + assert.ok(exists); + }); + + it('should load a GCS ORC file', async () => { + const tableId = generateUuid(); + const output = execSync(`node loadTableGCSORC.js ${datasetId} ${tableId}`); assert.match(output, /completed\./); const [rows] = await bigquery .dataset(datasetId) @@ -154,10 +268,10 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS Parquet file`, async () => { + it('should load a GCS Parquet file', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-parquet ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadTableGCSParquet.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -167,10 +281,73 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS CSV file with explicit schema`, async () => { + it('should load a GCS Avro file', async () => { + const tableId = 
generateUuid(); + const output = execSync(`node loadTableGCSAvro.js ${datasetId} ${tableId}`); + assert.match(output, /completed\./); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it('should load a GCS CSV file with explicit schema', async () => { + const tableId = generateUuid(); + const output = execSync(`node loadCSVFromGCS.js ${datasetId} ${tableId}`); + assert.match(output, /completed\./); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it('should load a GCS JSON file with explicit schema', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-csv ${projectId} ${datasetId} ${tableId}` + const output = execSync(`node loadJSONFromGCS.js ${datasetId} ${tableId}`); + assert.match(output, /completed\./); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it('should load a GCS CSV file to partitioned table', async () => { + const tableId = generateUuid(); + const output = execSync( + `node loadTablePartitioned.js ${datasetId} ${tableId}` + ); + assert.match(output, /completed\./); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it('should add a new column via a GCS file load job', async () => { + const destTableId = generateUuid(); + execSync( + `node createTable.js ${datasetId} ${destTableId} 'Name:STRING, Age:INTEGER, Weight:FLOAT'` + ); + const output = execSync( + `node addColumnLoadAppend.js ${datasetId} ${destTableId} ${localFilePath}` + ); + assert.match(output, /completed\./); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it('should relax a column via a GCS file load job', async () => { + const destTableId = generateUuid(); + execSync(`node createTable.js ${datasetId} ${destTableId}`); + const output = execSync( + `node relaxColumnLoadAppend.js ${datasetId} ${destTableId} ${partialDataFilePath}` ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -180,10 +357,10 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS JSON file with explicit schema`, async () => { + it('should load a GCS CSV file with autodetected schema', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-json ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadCSVFromGCSAutodetect.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -193,10 +370,10 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS CSV file with autodetected schema`, async () => { + it('should load a GCS JSON file with autodetected schema', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-csv-autodetect ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadJSONFromGCSAutodetect.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -206,12 +383,13 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS JSON file with autodetected schema`, async () => { + it('should load a GCS CSV file truncate table', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} 
load-gcs-json-autodetect ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadCSVFromGCSTruncate.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); + assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); const [rows] = await bigquery .dataset(datasetId) .table(tableId) @@ -219,12 +397,13 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS CSV file truncate table`, async () => { + it('should load a GCS JSON file truncate table', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-csv-truncate ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadJSONFromGCSTruncate.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); + assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); const [rows] = await bigquery .dataset(datasetId) .table(tableId) @@ -232,12 +411,13 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS JSON file truncate table`, async () => { + it('should load a GCS parquet file truncate table', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-json-truncate ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadParquetFromGCSTruncate.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); + assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); const [rows] = await bigquery .dataset(datasetId) .table(tableId) @@ -245,12 +425,13 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS parquet file truncate table`, async () => { + it('should load a GCS ORC file truncate table', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-parquet-truncate ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadOrcFromGCSTruncate.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); + assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); const [rows] = await bigquery .dataset(datasetId) .table(tableId) @@ -258,12 +439,13 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should load a GCS ORC file truncate table`, async () => { + it('should load a GCS Avro file truncate table', async () => { const tableId = generateUuid(); - const output = await exec( - `${cmd} load-gcs-orc-truncate ${projectId} ${datasetId} ${tableId}` + const output = execSync( + `node loadTableGCSAvroTruncate.js ${datasetId} ${tableId}` ); assert.match(output, /completed\./); + assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); const [rows] = await bigquery .dataset(datasetId) .table(tableId) @@ -271,9 +453,9 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should copy a table`, async () => { - const output = await exec( - `${cmd} copy ${projectId} ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}` + it('should copy a table', async () => { + const output = execSync( + `node copyTable.js ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}` ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -283,31 +465,127 @@ describe('Tables', () => { assert.ok(rows.length > 0); }); - it(`should insert rows`, async () => { - const res = await execa.shell( - `${cmd} insert ${projectId} ${datasetId} ${tableId} 'foo.bar'` - ); - assert.match( - res.stderr, - /"json_or_file" \(or the file it points to\) is not a valid JSON array\./ + 
it('should insert rows', async () => { + const output = execSync( + `node insertRowsAsStream.js ${datasetId} ${tableId}` ); - const output = await exec( - `${cmd} insert ${projectId} ${datasetId} ${tableId} '${JSON.stringify( - rows - )}'` + assert.match(output, /Inserted 2 rows/); + }); + + it('should insert rows with supported data types', async () => { + const typesTableId = generateUuid(); + const output = execSync( + `node insertingDataTypes.js ${datasetId} ${typesTableId}` ); assert.match(output, /Inserted 2 rows/); }); - it(`should delete a table`, async () => { - const output = await exec( - `${cmd} delete ${projectId} ${datasetId} ${tableId}` + it('copy multiple source tables to a given destination', async () => { + execSync(`node createTable.js ${datasetId} destinationTable`); + const output = execSync( + `node copyTableMultipleSource.js ${datasetId} ${tableId} destinationTable` ); - assert.strictEqual(output, `Table ${tableId} deleted.`); - const [exists] = await bigquery - .dataset(datasetId) - .table(tableId) - .exists(); - assert.strictEqual(exists, false); + assert.include(output, 'sourceTable'); + assert.include(output, 'destinationTable'); + assert.include(output, 'createDisposition'); + assert.include(output, 'writeDisposition'); + }); + + it('should add a column to the schema', async () => { + const column = "name: 'size', type: 'STRING'"; + const output = execSync(`node addEmptyColumn.js ${datasetId} ${tableId}`); + assert.include(output, column); + }); + + it("should update a column from 'REQUIRED' TO 'NULLABLE'", async () => { + const column = "name: 'Name', type: 'STRING', mode: 'NULLABLE'"; + execSync(`node createTable.js ${datasetId} newTable`); + const output = execSync(`node relaxColumn.js ${datasetId} newTable`); + assert.include(output, column); + }); + + it('should get labels on a table', async () => { + execSync(`node labelTable.js ${datasetId} ${tableId}`); + const output = execSync(`node getTableLabels.js ${datasetId} ${tableId}`); + assert.include(output, `${tableId} Labels:`); + assert.include(output, 'color: green'); + }); + + describe('Views', () => { + it('should create a view', async () => { + const output = execSync(`node createView.js ${datasetId} ${viewId}`); + assert.include(output, `View ${viewId} created.`); + const [exists] = await bigquery + .dataset(datasetId) + .table(viewId) + .exists(); + assert.ok(exists); + }); + + it('should get a view', async () => { + const viewId = generateUuid(); + execSync(`node createView.js ${datasetId} ${viewId}`); + const output = execSync(`node getView.js ${datasetId} ${viewId}`); + assert.match(output, /View at/); + assert.match(output, /View query:/); + }); + + it('should update a view', async () => { + const output = execSync(`node updateViewQuery.js ${datasetId} ${viewId}`); + assert.include(output, `View ${viewId} updated.`); + }); + }); + + describe('Delete Table', () => { + const datasetId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + const tableId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + + before(async () => { + const datasetOptions = { + location: 'US', + }; + const tableOptions = { + location: 'US', + }; + + await bigquery.createDataset(datasetId, datasetOptions); + // Create a new table in the dataset + await bigquery.dataset(datasetId).createTable(tableId, tableOptions); + }); + + after(async () => { + await bigquery + .dataset(datasetId) + .delete({force: true}) + .catch(console.warn); + }); + + it('should delete a table', async () => { + const output = execSync(`node 
deleteTable.js ${datasetId} ${tableId}`); + assert.include(output, `Table ${tableId} deleted.`); + const [exists] = await bigquery + .dataset(datasetId) + .table(tableId) + .exists(); + assert.strictEqual(exists, false); + }); + + it('should undelete a table', async () => { + const tableId = generateUuid(); + const recoveredTableId = generateUuid(); + + execSync(`node createTable.js ${datasetId} ${tableId}`); + const output = execSync( + `node undeleteTable.js ${datasetId} ${tableId} ${recoveredTableId}` + ); + + assert.include(output, `Table ${tableId} deleted.`); + assert.match(output, /Copied data from deleted table/); + const [exists] = await bigquery + .dataset(datasetId) + .table(recoveredTableId) + .exists(); + assert.strictEqual(exists, true); + }); }); }); diff --git a/samples/undeleteTable.js b/samples/undeleteTable.js new file mode 100644 index 00000000..b695e867 --- /dev/null +++ b/samples/undeleteTable.js @@ -0,0 +1,69 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Dataset + tableId = 'my_table_to_undelete', // Table to recover + recoveredTableId = 'my_recovered_table' // Recovered table +) { + // [START bigquery_undelete_table] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function undeleteTable() { + // Undeletes "my_table_to_undelete" from "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + // const tableId = "my_table_to_undelete"; + // const recoveredTableId = "my_recovered_table"; + + /** + * TODO(developer): Choose an appropriate snapshot point as epoch milliseconds. + * For this example, we choose the current time as we're about to delete the + * table immediately afterwards. + */ + const snapshotEpoch = Date.now(); + + // Delete the table + await bigquery + .dataset(datasetId) + .table(tableId) + .delete(); + + console.log(`Table ${tableId} deleted.`); + + // Construct the restore-from table ID using a snapshot decorator. + const snapshotTableId = `${tableId}@${snapshotEpoch}`; + + // Construct and run a copy job. + await bigquery + .dataset(datasetId) + .table(snapshotTableId) + .copy(bigquery.dataset(datasetId).table(recoveredTableId)); + + console.log( + `Copied data from deleted table ${tableId} to ${recoveredTableId}` + ); + } + // [END bigquery_undelete_table] + undeleteTable(); +} + +main(...process.argv.slice(2)); diff --git a/samples/updateDatasetAccess.js b/samples/updateDatasetAccess.js new file mode 100644 index 00000000..97263339 --- /dev/null +++ b/samples/updateDatasetAccess.js @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset') { + // [START bigquery_update_dataset_access] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateDatasetAccess() { + // Update a dataset's access controls. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + + // Create new role metadata + const newRole = { + role: 'READER', + entity_type: 'userByEmail', + userByEmail: 'sample.bigquery.dev@gmail.com', + }; + + // Retrieve current dataset metadata + const dataset = bigquery.dataset(datasetId); + const [metadata] = await dataset.getMetadata(); + + // Add the new role to the access array + metadata.access.push(newRole); + const [apiResponse] = await dataset.setMetadata(metadata); + const newAccessRoles = apiResponse.access; + newAccessRoles.forEach(role => console.log(role)); + } + // [END bigquery_update_dataset_access] + updateDatasetAccess(); +} +main(...process.argv.slice(2)); diff --git a/samples/updateDatasetDescription.js b/samples/updateDatasetDescription.js new file mode 100644 index 00000000..687572f1 --- /dev/null +++ b/samples/updateDatasetDescription.js @@ -0,0 +1,42 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset') { + // [START bigquery_update_dataset_description] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateDatasetDescription() { + // Updates a dataset's description.
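+    // Note: this and the other update samples in this change follow the same
+    // read-modify-write pattern: fetch the current metadata with getMetadata(),
+    // change only the fields you need, then persist them with setMetadata().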
+ + // Retrieve current dataset metadata + const dataset = bigquery.dataset(datasetId); + const [metadata] = await dataset.getMetadata(); + + // Set new dataset description + const description = 'New dataset description.'; + metadata.description = description; + + const [apiResponse] = await dataset.setMetadata(metadata); + const newDescription = apiResponse.description; + + console.log(`${datasetId} description: ${newDescription}`); + } + // [END bigquery_update_dataset_description] + updateDatasetDescription(); +} +main(...process.argv.slice(2)); diff --git a/samples/updateDatasetExpiration.js b/samples/updateDatasetExpiration.js new file mode 100644 index 00000000..4068e47c --- /dev/null +++ b/samples/updateDatasetExpiration.js @@ -0,0 +1,47 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(datasetId = 'my_dataset') { + // [START bigquery_update_dataset_expiration] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateDatasetExpiration() { + // Updates the lifetime of all tables in the dataset, in milliseconds. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_dataset"; + + // Retrieve current dataset metadata + const dataset = bigquery.dataset(datasetId); + const [metadata] = await dataset.getMetadata(); + + // Set new dataset metadata + const expirationTime = 24 * 60 * 60 * 1000; + metadata.defaultTableExpirationMs = expirationTime.toString(); + + const [apiResponse] = await dataset.setMetadata(metadata); + const newExpirationTime = apiResponse.defaultTableExpirationMs; + + console.log(`${datasetId} expiration: ${newExpirationTime}`); + } + // [END bigquery_update_dataset_expiration] + updateDatasetExpiration(); +} +main(...process.argv.slice(2)); diff --git a/samples/updateModel.js b/samples/updateModel.js new file mode 100644 index 00000000..b4a332fc --- /dev/null +++ b/samples/updateModel.js @@ -0,0 +1,50 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: BigQuery Update Model +// description: Updates a model's metadata.
+// usage: node updateModel.js + +function main(datasetId = 'my_dataset', modelId = 'my_model') { + // [START bigquery_update_model_description] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateModel() { + // Updates a model's metadata. + + /** + * TODO(developer): Uncomment the following lines before running the sample + */ + // const datasetId = "my_dataset"; + // const modelId = "my_model"; + + const metadata = { + description: 'A really great model.', + }; + + const dataset = bigquery.dataset(datasetId); + const [apiResponse] = await dataset.model(modelId).setMetadata(metadata); + const newDescription = apiResponse.description; + + console.log(`${modelId} description: ${newDescription}`); + } + // [END bigquery_update_model_description] + updateModel(); +} +main(...process.argv.slice(2)); diff --git a/samples/updateRoutine.js b/samples/updateRoutine.js new file mode 100644 index 00000000..1240a45e --- /dev/null +++ b/samples/updateRoutine.js @@ -0,0 +1,52 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + routineId = 'my_routine' // Existing routine +) { + // [START bigquery_update_routine] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateRoutine() { + // Updates a routine named "my_routine" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset'; + // const routineId = 'my_routine'; + + const updates = { + description: 'New description', + }; + + const dataset = bigquery.dataset(datasetId); + + // Create routine reference + let routine = dataset.routine(routineId); + + // Make API call + [routine] = await routine.setMetadata(updates); + + console.log(`Routine description: ${routine.description}`); + } + updateRoutine(); + // [END bigquery_update_routine] +} +main(...process.argv.slice(2)); diff --git a/samples/updateTableDescription.js b/samples/updateTableDescription.js new file mode 100644 index 00000000..39d2d9de --- /dev/null +++ b/samples/updateTableDescription.js @@ -0,0 +1,41 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +'use strict'; + +function main(datasetId = 'my_dataset', tableId = 'my_table') { + // [START bigquery_update_table_description] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateTableDescription() { + // Updates a table's description. + + // Retrieve current table metadata + const table = bigquery.dataset(datasetId).table(tableId); + const [metadata] = await table.getMetadata(); + + // Set new table description + const description = 'New table description.'; + metadata.description = description; + const [apiResponse] = await table.setMetadata(metadata); + const newDescription = apiResponse.description; + + console.log(`${tableId} description: ${newDescription}`); + } + // [END bigquery_update_table_description] + updateTableDescription(); +} +main(...process.argv.slice(2)); diff --git a/samples/updateTableExpiration.js b/samples/updateTableExpiration.js new file mode 100644 index 00000000..9adcb511 --- /dev/null +++ b/samples/updateTableExpiration.js @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset + tableId = 'my_table', // Existing table + expirationTime = Date.now() + 1000 * 60 * 60 * 24 * 5 // 5 days from current time in ms +) { + // [START bigquery_update_table_expiration] + // Import the Google Cloud client library + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateTableExpiration() { + // Updates a table's expiration. + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = 'my_dataset', // Existing dataset + // const tableId = 'my_table', // Existing table + // const expirationTime = Date.now() + 1000 * 60 * 60 * 24 * 5 // 5 days from current time in ms + + // Retrieve current table metadata + const table = bigquery.dataset(datasetId).table(tableId); + const [metadata] = await table.getMetadata(); + + // Set new table expiration to 5 days from current time + metadata.expirationTime = expirationTime.toString(); + const [apiResponse] = await table.setMetadata(metadata); + + const newExpirationTime = apiResponse.expirationTime; + console.log(`${tableId} expiration: ${newExpirationTime}`); + } + // [END bigquery_update_table_expiration] + updateTableExpiration(); +} +main(...process.argv.slice(2)); diff --git a/samples/updateViewQuery.js b/samples/updateViewQuery.js new file mode 100644 index 00000000..ad426e2b --- /dev/null +++ b/samples/updateViewQuery.js @@ -0,0 +1,58 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main( + datasetId = 'my_dataset', // Existing dataset ID + tableId = 'my_existing_view' // Existing view ID +) { + // [START bigquery_update_view_query] + // Import the Google Cloud client library and create a client + const {BigQuery} = require('@google-cloud/bigquery'); + const bigquery = new BigQuery(); + + async function updateViewQuery() { + // Updates a view named "my_existing_view" in "my_dataset". + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const datasetId = "my_existing_dataset" + // const tableId = "my_existing_table" + const dataset = await bigquery.dataset(datasetId); + + // This example updates a view into the USA names dataset to include state. + const newViewQuery = `SELECT name, state + FROM \`bigquery-public-data.usa_names.usa_1910_current\` + LIMIT 10`; + + // Retrieve existing view + const [view] = await dataset.table(tableId).get(); + + // Retrieve existing view metadata + const [metadata] = await view.getMetadata(); + + // Update view query + metadata.view = newViewQuery; + + // Set metadata + await view.setMetadata(metadata); + + console.log(`View ${tableId} updated.`); + } + // [END bigquery_update_view_query] + updateViewQuery(); +} +main(...process.argv.slice(2)); diff --git a/src/bigquery.ts b/src/bigquery.ts new file mode 100644 index 00000000..6df792c4 --- /dev/null +++ b/src/bigquery.ts @@ -0,0 +1,2183 @@ +/*! + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import * as common from '@google-cloud/common'; +import {paginator, ResourceStream} from '@google-cloud/paginator'; +import {promisifyAll} from '@google-cloud/promisify'; +import arrify = require('arrify'); +import {Big} from 'big.js'; +import * as extend from 'extend'; +import * as is from 'is'; +import * as uuid from 'uuid'; + +import {Dataset, DatasetOptions} from './dataset'; +import {Job, JobOptions, QueryResultsOptions} from './job'; +import { + Table, + TableField, + TableSchema, + TableRow, + TableRowField, + JobCallback, + JobResponse, + RowMetadata, +} from './table'; +import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; +import bigquery from './types'; +
+export interface RequestCallback<T> { + (err: Error | null, response?: T | null): void; +} + +export interface ResourceCallback<T, R> { + (err: Error | null, resource?: T | null, response?: R | null): void; +} + +export type PagedResponse<T, Q, R> = [T[]] | [T[], Q | null, R]; +export interface PagedCallback<T, Q, R> { + ( + err: Error | null, + resource?: T[] | null, + nextQuery?: Q | null, + response?: R | null + ): void; +} + +export type JobRequest<J> = J & { + jobId?: string; + jobPrefix?: string; + location?: string; +}; + +export type PagedRequest<P> = P & { + autoPaginate?: boolean; + maxApiCalls?: number; +}; +
+export type QueryRowsResponse = PagedResponse< + RowMetadata, + Query, + bigquery.IGetQueryResultsResponse +>; +export type QueryRowsCallback = PagedCallback< + RowMetadata, + Query, + bigquery.IGetQueryResultsResponse +>; + +export type SimpleQueryRowsResponse = [RowMetadata[], bigquery.IJob]; +export type SimpleQueryRowsCallback = ResourceCallback< + RowMetadata[], + bigquery.IJob +>; + +export type Query = JobRequest<bigquery.IJobConfigurationQuery> & { + destination?: Table; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + params?: any[] | {[param: string]: any}; + dryRun?: boolean; + labels?: {[label: string]: string}; + types?: string[] | string[][] | {[type: string]: string | string[]}; + job?: Job; + maxResults?: number; + jobTimeoutMs?: number; + pageToken?: string; + wrapIntegers?: boolean | IntegerTypeCastOptions; +}; + +export type QueryOptions = QueryResultsOptions; +export type QueryStreamOptions = { + wrapIntegers?: boolean | IntegerTypeCastOptions; +}; +export type DatasetResource = bigquery.IDataset; +export type ValueType = bigquery.IQueryParameterType; +
+export type GetDatasetsOptions = PagedRequest<bigquery.datasets.IListParams>; +export type DatasetsResponse = PagedResponse< + Dataset, + GetDatasetsOptions, + bigquery.IDatasetList +>; +export type DatasetsCallback = PagedCallback< + Dataset, + GetDatasetsOptions, + bigquery.IDatasetList +>; + +export type DatasetResponse = [Dataset, bigquery.IDataset]; +export type DatasetCallback = ResourceCallback<Dataset, bigquery.IDataset>; + +export type GetJobsOptions = PagedRequest<bigquery.jobs.IListParams>; +export type GetJobsResponse = PagedResponse< + Job, + GetJobsOptions, + bigquery.IJobList +>; +export type GetJobsCallback = PagedCallback< + Job, + GetJobsOptions, + bigquery.IJobList +>; +
+export interface BigQueryTimeOptions { + hours?: number | string; + minutes?: number | string; + seconds?: number | string; + fractional?: number | string; +} + +export interface BigQueryDateOptions { + year?: number | string; + month?: number | string; + day?: number | string; +} + +export interface BigQueryDatetimeOptions { + year?: string | number; + month?: string | number; + day?: string | number; + hours?: string | number; + minutes?: string | number; + seconds?: string | number; + fractional?: string | number; +} + +export type ProvidedTypeArray = Array<ProvidedTypeStruct | string | []>; + +export interface ProvidedTypeStruct { + [key: string]: string | ProvidedTypeArray | ProvidedTypeStruct; +} + +export type QueryParameter = bigquery.IQueryParameter; +
+export interface BigQueryOptions extends common.GoogleAuthOptions { + autoRetry?: boolean; + maxRetries?: number; + location?: string; + userAgent?: string; + /** + * The API endpoint of the service used to make requests. + * Defaults to `bigquery.googleapis.com`. + */ + apiEndpoint?: string; +} + +export interface IntegerTypeCastOptions { + integerTypeCastFunction: Function; + fields?: string | string[]; +} + +export type IntegerTypeCastValue = { + integerValue: string | number; + schemaFieldName?: string; +}; + +export const PROTOCOL_REGEX = /^(\w*):\/\//; + +/** + * @typedef {object} BigQueryOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID.
If your app is running in + * an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to a + * JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [token] An OAUTH access token. If provided, we will not + * manage fetching, re-using, and re-minting access tokens. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {boolean} [autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server errors. + * We will exponentially backoff subsequent requests by default. + * @property {number} [maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {Constructor} [promise] Custom promise module to use instead of + * native Promises. + * @property {string} [location] The geographic location of all datasets and + * jobs referenced and created through the client. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {string[]} [scopes] Additional OAuth scopes to use in requests. For + * example, to access an external data source, you may need the + * `https://www.googleapis.com/auth/drive.readonly` scope. + * @property {string=} apiEndpoint The API endpoint of the service used to make requests. Defaults to `bigquery.googleapis.com`. + */ + +/** + * In the following examples from this page and the other modules (`Dataset`, + * `Table`, etc.), we are going to be using a dataset from + * [data.gov](http://goo.gl/f2SXcb) of higher education institutions. + * + * We will create a table with the correct schema, import the public CSV file + * into that table, and query it for data. + * + * @class + * + * @see [What is BigQuery?]{@link https://cloud.google.com/bigquery/what-is-bigquery} + * + * @param {BigQueryOptions} options Constructor options. 
+ * + * @example Install the client library with npm: + * npm install @google-cloud/bigquery + * + * @example Import the client library + * const {BigQuery} = require('@google-cloud/bigquery'); + * + * @example Create a client that uses Application Default Credentials (ADC): + * const bigquery = new BigQuery(); + * + * @example Create a client with explicit credentials: + * const bigquery = new BigQuery({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * + * @example include:samples/quickstart.js + * region_tag:bigquery_quickstart + * Full quickstart example: + */ +export class BigQuery extends common.Service { + location?: string; + + createQueryStream: (options?: Query | string) => ResourceStream; + getDatasetsStream: (options?: GetDatasetsOptions) => ResourceStream; + getJobsStream: (options?: GetJobsOptions) => ResourceStream; + + constructor(options: BigQueryOptions = {}) { + let apiEndpoint = 'https://bigquery.googleapis.com'; + + const EMULATOR_HOST = process.env.BIGQUERY_EMULATOR_HOST; + + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = BigQuery.sanitizeEndpoint(EMULATOR_HOST); + } + + if (options.apiEndpoint) { + apiEndpoint = BigQuery.sanitizeEndpoint(options.apiEndpoint); + } + + options = Object.assign({}, options, { + apiEndpoint, + }); + + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/bigquery/v2`; + + const config = { + apiEndpoint: options.apiEndpoint!, + baseUrl, + scopes: ['https://www.googleapis.com/auth/bigquery'], + packageJson: require('../../package.json'), + }; + + if (options.scopes) { + config.scopes = config.scopes.concat(options.scopes); + } + + super(config, options); + + /** + * @name BigQuery#location + * @type {string} + */ + this.location = options.location; + /** + * Run a query scoped to your project as a readable object stream. + * + * @param {object} query Configuration object. See {@link Query} for a complete + * list of options. + * @returns {stream} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT + * 100'; + * + * bigquery.createQueryStream(query) + * .on('error', console.error) + * .on('data', function(row) { + * // row is a result from your query. + * }) + * .on('end', function() { + * // All rows retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bigquery.createQueryStream(query) + * .on('data', function(row) { + * this.end(); + * }); + */ + this.createQueryStream = paginator.streamify('queryAsStream_'); + + /** + * List all or some of the {@link Dataset} objects in your project as + * a readable object stream. + * + * @param {object} [options] Configuration object. See + * {@link BigQuery#getDatasets} for a complete list of options. + * @returns {stream} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * bigquery.getDatasetsStream() + * .on('error', console.error) + * .on('data', function(dataset) { + * // dataset is a Dataset object. + * }) + * .on('end', function() { + * // All datasets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. 
+ * //- + * bigquery.getDatasetsStream() + * .on('data', function(dataset) { + * this.end(); + * }); + */ + this.getDatasetsStream = paginator.streamify('getDatasets'); + + /** + * List all or some of the {@link Job} objects in your project as a + * readable object stream. + * + * @param {object} [options] Configuration object. See + * {@link BigQuery#getJobs} for a complete list of options. + * @returns {stream} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * bigquery.getJobsStream() + * .on('error', console.error) + * .on('data', function(job) { + * // job is a Job object. + * }) + * .on('end', function() { + * // All jobs retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bigquery.getJobsStream() + * .on('data', function(job) { + * this.end(); + * }); + */ + this.getJobsStream = paginator.streamify('getJobs'); + + // Disable `prettyPrint` for better performance. + // https://github.com/googleapis/nodejs-bigquery/issues/858 + this.interceptors.push({ + request: (reqOpts: common.DecorateRequestOptions) => { + return extend(true, {}, reqOpts, {qs: {prettyPrint: false}}); + }, + }); + } + + private static sanitizeEndpoint(url: string) { + if (!PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + + /** + * Merge a rowset returned from the API with a table schema. + * + * @private + * + * @param {object} schema + * @param {array} rows + * @param {boolean|IntegerTypeCastOptions} wrapIntegers Wrap values of + * 'INT64' type in {@link BigQueryInt} objects. + * If a `boolean`, this will wrap values in {@link BigQueryInt} objects. + * If an `object`, this will return a value returned by + * `wrapIntegers.integerTypeCastFunction`. + * Please see {@link IntegerTypeCastOptions} for options descriptions. + * @param {array} selectedFields List of fields to return. + * If unspecified, all fields are returned. + * @returns {array} Fields using their matching names from the table's schema. + */ + static mergeSchemaWithRows_( + schema: TableSchema | TableField, + rows: TableRow[], + wrapIntegers: boolean | IntegerTypeCastOptions, + selectedFields?: string[] + ) { + if (selectedFields && selectedFields!.length > 0) { + const selectedFieldsArray = selectedFields!.map(c => { + return c.split('.'); + }); + + const currentFields = selectedFieldsArray.map(c => c.shift()); + //filter schema fields based on selected fields. + schema.fields = schema.fields?.filter( + field => + currentFields + .map(c => c!.toLowerCase()) + .indexOf(field.name!.toLowerCase()) >= 0 + ); + selectedFields = selectedFieldsArray + .filter(c => c.length > 0) + .map(c => c.join('.')); + } + + return arrify(rows) + .map(mergeSchema) + .map(flattenRows); + function mergeSchema(row: TableRow) { + return row.f!.map((field: TableRowField, index: number) => { + const schemaField = schema.fields![index]; + let value = field.v; + if (schemaField.mode === 'REPEATED') { + value = (value as TableRowField[]).map(val => { + return convert(schemaField, val.v, wrapIntegers, selectedFields); + }); + } else { + value = convert(schemaField, value, wrapIntegers, selectedFields); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const fieldObject: any = {}; + fieldObject[schemaField.name!] 
= value; + return fieldObject; + }); + } + + function convert( + schemaField: TableField, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: any, + wrapIntegers: boolean | IntegerTypeCastOptions, + selectedFields?: string[] + ) { + if (is.null(value)) { + return value; + } + + switch (schemaField.type) { + case 'BOOLEAN': + case 'BOOL': { + value = value.toLowerCase() === 'true'; + break; + } + case 'BYTES': { + value = Buffer.from(value, 'base64'); + break; + } + case 'FLOAT': + case 'FLOAT64': { + value = Number(value); + break; + } + case 'INTEGER': + case 'INT64': { + value = wrapIntegers + ? typeof wrapIntegers === 'object' + ? BigQuery.int( + {integerValue: value, schemaFieldName: schemaField.name}, + wrapIntegers + ).valueOf() + : BigQuery.int(value) + : Number(value); + break; + } + case 'NUMERIC': { + value = new Big(value); + break; + } + case 'BIGNUMERIC': { + value = new Big(value); + break; + } + case 'RECORD': { + value = BigQuery.mergeSchemaWithRows_( + schemaField, + value, + wrapIntegers, + selectedFields + ).pop(); + break; + } + case 'DATE': { + value = BigQuery.date(value); + break; + } + case 'DATETIME': { + value = BigQuery.datetime(value); + break; + } + case 'TIME': { + value = BigQuery.time(value); + break; + } + case 'TIMESTAMP': { + value = BigQuery.timestamp(new Date(value * 1000)); + break; + } + case 'GEOGRAPHY': { + value = BigQuery.geography(value); + break; + } + default: + break; + } + + return value; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + function flattenRows(rows: any[]) { + return rows.reduce((acc, row) => { + const key = Object.keys(row)[0]; + acc[key] = row[key]; + return acc; + }, {}); + } + } + + /** + * The `DATE` type represents a logical calendar date, independent of time + * zone. It does not represent a specific 24-hour time period. Rather, a given + * DATE value represents a different 24-hour period when interpreted in + * different time zones, and may represent a shorter or longer day during + * Daylight Savings Time transitions. + * + * @param {object|string} value The date. If a string, this should be in the + * format the API describes: `YYYY-[M]M-[D]D`. + * Otherwise, provide an object. + * @param {string|number} value.year Four digits. + * @param {string|number} value.month One or two digits. + * @param {string|number} value.day One or two digits. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const date = bigquery.date('2017-01-01'); + * + * //- + * // Alternatively, provide an object. + * //- + * const date2 = bigquery.date({ + * year: 2017, + * month: 1, + * day: 1 + * }); + */ + static date(value: BigQueryDateOptions | string) { + return new BigQueryDate(value); + } + + /** + * @param {object|string} value The date. If a string, this should be in the + * format the API describes: `YYYY-[M]M-[D]D`. + * Otherwise, provide an object. + * @param {string|number} value.year Four digits. + * @param {string|number} value.month One or two digits. + * @param {string|number} value.day One or two digits. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const date = BigQuery.date('2017-01-01'); + * + * //- + * // Alternatively, provide an object. + * //- + * const date2 = BigQuery.date({ + * year: 2017, + * month: 1, + * day: 1 + * }); + */ + + date(value: BigQueryDateOptions | string) { + return BigQuery.date(value); + } + + /** + * A `DATETIME` data type represents a point in time. 
Unlike a `TIMESTAMP`, + * this does not refer to an absolute instance in time. Instead, it is the + * civil time, or the time that a user would see on a watch or calendar. + * + * @method BigQuery.datetime + * @param {object|string} value The time. If a string, this should be in the + * format the API describes: `YYYY-[M]M-[D]D[ [H]H:[M]M:[S]S[.DDDDDD]]`. + * Otherwise, provide an object. + * @param {string|number} value.year Four digits. + * @param {string|number} value.month One or two digits. + * @param {string|number} value.day One or two digits. + * @param {string|number} [value.hours] One or two digits (`00` - `23`). + * @param {string|number} [value.minutes] One or two digits (`00` - `59`). + * @param {string|number} [value.seconds] One or two digits (`00` - `59`). + * @param {string|number} [value.fractional] Up to six digits for microsecond + * precision. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const datetime = BigQuery.datetime('2017-01-01 13:00:00'); + * + * //- + * // Alternatively, provide an object. + * //- + * const datetime = BigQuery.datetime({ + * year: 2017, + * month: 1, + * day: 1, + * hours: 14, + * minutes: 0, + * seconds: 0 + * }); + */ + + /** + * A `DATETIME` data type represents a point in time. Unlike a `TIMESTAMP`, + * this does not refer to an absolute instance in time. Instead, it is the + * civil time, or the time that a user would see on a watch or calendar. + * + * @method BigQuery#datetime + * @param {object|string} value The time. If a string, this should be in the + * format the API describes: `YYYY-[M]M-[D]D[ [H]H:[M]M:[S]S[.DDDDDD]]`. + * Otherwise, provide an object. + * @param {string|number} value.year Four digits. + * @param {string|number} value.month One or two digits. + * @param {string|number} value.day One or two digits. + * @param {string|number} [value.hours] One or two digits (`00` - `23`). + * @param {string|number} [value.minutes] One or two digits (`00` - `59`). + * @param {string|number} [value.seconds] One or two digits (`00` - `59`). + * @param {string|number} [value.fractional] Up to six digits for microsecond + * precision. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const datetime = bigquery.datetime('2017-01-01 13:00:00'); + * + * //- + * // Alternatively, provide an object. + * //- + * const datetime = bigquery.datetime({ + * year: 2017, + * month: 1, + * day: 1, + * hours: 14, + * minutes: 0, + * seconds: 0 + * }); + */ + static datetime(value: BigQueryDatetimeOptions | string) { + return new BigQueryDatetime(value); + } + + datetime(value: BigQueryDatetimeOptions | string) { + return BigQuery.datetime(value); + } + + /** + * A `TIME` data type represents a time, independent of a specific date. + * + * @method BigQuery.time + * @param {object|string} value The time. If a string, this should be in the + * format the API describes: `[H]H:[M]M:[S]S[.DDDDDD]`. Otherwise, provide + * an object. + * @param {string|number} [value.hours] One or two digits (`00` - `23`). + * @param {string|number} [value.minutes] One or two digits (`00` - `59`). + * @param {string|number} [value.seconds] One or two digits (`00` - `59`). + * @param {string|number} [value.fractional] Up to six digits for microsecond + * precision. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const time = BigQuery.time('14:00:00'); // 2:00 PM + * + * //- + * // Alternatively, provide an object. 
+ * //- + * const time = BigQuery.time({ + * hours: 14, + * minutes: 0, + * seconds: 0 + * }); + */ + + /** + * A `TIME` data type represents a time, independent of a specific date. + * + * @method BigQuery#time + * @param {object|string} value The time. If a string, this should be in the + * format the API describes: `[H]H:[M]M:[S]S[.DDDDDD]`. Otherwise, provide + * an object. + * @param {string|number} [value.hours] One or two digits (`00` - `23`). + * @param {string|number} [value.minutes] One or two digits (`00` - `59`). + * @param {string|number} [value.seconds] One or two digits (`00` - `59`). + * @param {string|number} [value.fractional] Up to six digits for microsecond + * precision. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const time = bigquery.time('14:00:00'); // 2:00 PM + * + * //- + * // Alternatively, provide an object. + * //- + * const time = bigquery.time({ + * hours: 14, + * minutes: 0, + * seconds: 0 + * }); + */ + static time(value: BigQueryTimeOptions | string) { + return new BigQueryTime(value); + } + + time(value: BigQueryTimeOptions | string) { + return BigQuery.time(value); + } + + /** + * A timestamp represents an absolute point in time, independent of any time + * zone or convention such as Daylight Savings Time. + * + * @method BigQuery.timestamp + * @param {Date|string} value The time. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const timestamp = BigQuery.timestamp(new Date()); + */ + + /** + * A timestamp represents an absolute point in time, independent of any time + * zone or convention such as Daylight Savings Time. + * + * @method BigQuery#timestamp + * @param {Date|string} value The time. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const timestamp = bigquery.timestamp(new Date()); + */ + static timestamp(value: Date | string) { + return new BigQueryTimestamp(value); + } + + timestamp(value: Date | string) { + return BigQuery.timestamp(value); + } + + /** + * A BigQueryInt wraps 'INT64' values. Can be used to maintain precision. + * + * @method BigQuery#int + * @param {string|number|IntegerTypeCastValue} value The INT64 value to convert. + * @param {IntegerTypeCastOptions} typeCastOptions Configuration to convert + * value. Must provide an `integerTypeCastFunction` to handle conversion. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * const largeIntegerValue = Number.MAX_SAFE_INTEGER + 1; + * + * const options = { + * integerTypeCastFunction: value => value.split(), + * }; + * + * const bqInteger = bigquery.int(largeIntegerValue, options); + * + * const customValue = bqInteger.valueOf(); + * // customValue is the value returned from your `integerTypeCastFunction`. + */ + static int( + value: string | number | IntegerTypeCastValue, + typeCastOptions?: IntegerTypeCastOptions + ) { + return new BigQueryInt(value, typeCastOptions); + } + + int( + value: string | number | IntegerTypeCastValue, + typeCastOptions?: IntegerTypeCastOptions + ) { + return BigQuery.int(value, typeCastOptions); + } + + /** + * A geography value represents a surface area on the Earth + * in Well-known Text (WKT) format. + * + * @method BigQuery.geography + * @param {string} value The geospatial data. 
+ * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const geography = BigQuery.geography('POINT(1, 2)'); + */ + + /** + * A geography value represents a surface area on the Earth + * in Well-known Text (WKT) format. + * + * @method BigQuery#geography + * @param {string} value The geospatial data. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const geography = bigquery.geography('POINT(1, 2)'); + */ + static geography(value: string) { + return new Geography(value); + } + + geography(value: string) { + return BigQuery.geography(value); + } + + /** + * Convert an INT64 value to Number. + * + * @private + * @param {object} value The INT64 value to convert. + */ + static decodeIntegerValue_(value: IntegerTypeCastValue) { + const num = Number(value.integerValue); + if (!Number.isSafeInteger(num)) { + throw new Error( + 'We attempted to return all of the numeric values, but ' + + (value.schemaFieldName ? value.schemaFieldName + ' ' : '') + + 'value ' + + value.integerValue + + " is out of bounds of 'Number.MAX_SAFE_INTEGER'.\n" + + "To prevent this error, please consider passing 'options.wrapNumbers' as\n" + + '{\n' + + ' integerTypeCastFunction: provide \n' + + ' fields: optionally specify field name(s) to be custom casted\n' + + '}\n' + ); + } + return num; + } + + /** + * Return a value's provided type. + * + * @private + * + * @throws {error} If the type provided is invalid. + * + * @see [Data Type]{@link https://cloud.google.com/bigquery/data-types} + * + * @param {*} providedType The type. + * @returns {string} The valid type provided. + */ + static getTypeDescriptorFromProvidedType_( + providedType: string | ProvidedTypeStruct | ProvidedTypeArray + ): ValueType { + // The list of types can be found in src/types.d.ts + const VALID_TYPES = [ + 'DATE', + 'DATETIME', + 'TIME', + 'TIMESTAMP', + 'BYTES', + 'NUMERIC', + 'BIGNUMERIC', + 'BOOL', + 'INT64', + 'FLOAT64', + 'STRING', + 'GEOGRAPHY', + 'ARRAY', + 'STRUCT', + ]; + + if (is.array(providedType)) { + providedType = providedType as Array; + return { + type: 'ARRAY', + arrayType: BigQuery.getTypeDescriptorFromProvidedType_(providedType[0]), + }; + } else if (is.object(providedType)) { + return { + type: 'STRUCT', + structTypes: Object.keys(providedType).map(prop => { + return { + name: prop, + type: BigQuery.getTypeDescriptorFromProvidedType_( + (providedType as ProvidedTypeStruct)[prop] + ), + }; + }), + }; + } + + providedType = (providedType as string).toUpperCase(); + if (!VALID_TYPES.includes(providedType)) { + throw new Error(`Invalid type provided: "${providedType}"`); + } + + return {type: providedType.toUpperCase()}; + } + + /** + * Detect a value's type. + * + * @private + * + * @throws {error} If the type could not be detected. + * + * @see [Data Type]{@link https://cloud.google.com/bigquery/data-types} + * + * @param {*} value The value. + * @returns {string} The type detected from the value. + */ + static getTypeDescriptorFromValue_(value: unknown): ValueType { + let typeName; + + if (value === null) { + throw new Error( + "Parameter types must be provided for null values via the 'types' field in query options." 
+ ); + } + + if (value instanceof BigQueryDate) { + typeName = 'DATE'; + } else if (value instanceof BigQueryDatetime) { + typeName = 'DATETIME'; + } else if (value instanceof BigQueryTime) { + typeName = 'TIME'; + } else if (value instanceof BigQueryTimestamp) { + typeName = 'TIMESTAMP'; + } else if (value instanceof Buffer) { + typeName = 'BYTES'; + } else if (value instanceof Big) { + if (value.c.length - value.e >= 10) { + typeName = 'BIGNUMERIC'; + } else { + typeName = 'NUMERIC'; + } + } else if (value instanceof BigQueryInt) { + typeName = 'INT64'; + } else if (value instanceof Geography) { + typeName = 'GEOGRAPHY'; + } else if (Array.isArray(value)) { + if (value.length === 0) { + throw new Error( + "Parameter types must be provided for empty arrays via the 'types' field in query options." + ); + } + return { + type: 'ARRAY', + arrayType: BigQuery.getTypeDescriptorFromValue_(value[0]), + }; + } else if (is.boolean(value)) { + typeName = 'BOOL'; + } else if (is.number(value)) { + typeName = (value as number) % 1 === 0 ? 'INT64' : 'FLOAT64'; + } else if (is.object(value)) { + return { + type: 'STRUCT', + structTypes: Object.keys(value as object).map(prop => { + return { + name: prop, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + type: BigQuery.getTypeDescriptorFromValue_((value as any)[prop]), + }; + }), + }; + } else if (is.string(value)) { + typeName = 'STRING'; + } + + if (!typeName) { + throw new Error( + [ + 'This value could not be translated to a BigQuery data type.', + value, + ].join('\n') + ); + } + + return { + type: typeName, + }; + } + + /** + * Convert a value into a `queryParameter` object. + * + * @private + * + * @see [Jobs.query API Reference Docs (see `queryParameters`)]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#request-body} + * + * @param {*} value The value. + * @param {string|ProvidedTypeStruct|ProvidedTypeArray} providedType Provided + * query parameter type. + * @returns {object} A properly-formed `queryParameter` object. 
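A hedged illustration (not from the original patch) of the automatic typing described by `getTypeDescriptorFromValue_` and `valueToQueryParameter_` above: integers become `INT64`, fractional numbers `FLOAT64`, booleans `BOOL`, strings `STRING`, and the wrapper classes map to their own types. The query and aliases are arbitrary.

```js
// Hedged sketch of automatic query-parameter typing.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

bigquery.query({
  query:
    'SELECT @num_rows AS num_rows, @ratio AS ratio, ' +
    '@is_active AS is_active, @created_at AS created_at',
  params: {
    num_rows: 10,                               // detected as INT64
    ratio: 0.5,                                 // detected as FLOAT64
    is_active: true,                            // detected as BOOL
    created_at: bigquery.timestamp(new Date()), // detected as TIMESTAMP
  },
}, (err, rows) => {
  if (!err) console.log(rows[0]);
});
```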
+ */ + static valueToQueryParameter_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: any, + providedType?: string | ProvidedTypeStruct | ProvidedTypeArray + ) { + if (is.date(value)) { + value = BigQuery.timestamp(value as Date); + } + let parameterType: bigquery.IQueryParameterType; + if (providedType) { + parameterType = BigQuery.getTypeDescriptorFromProvidedType_(providedType); + } else { + parameterType = BigQuery.getTypeDescriptorFromValue_(value); + } + const queryParameter: QueryParameter = {parameterType, parameterValue: {}}; + + const typeName = queryParameter!.parameterType!.type!; + if (typeName === 'ARRAY') { + queryParameter.parameterValue!.arrayValues = (value as Array<{}>).map( + itemValue => { + const value = BigQuery._getValue(itemValue, parameterType.arrayType!); + if (is.object(value) || is.array(value)) { + if (is.array(providedType)) { + providedType = providedType as []; + return BigQuery.valueToQueryParameter_(value, providedType[0]) + .parameterValue!; + } else { + return BigQuery.valueToQueryParameter_(value).parameterValue!; + } + } + return {value} as bigquery.IQueryParameterValue; + } + ); + } else if (typeName === 'STRUCT') { + queryParameter.parameterValue!.structValues = Object.keys(value).reduce( + (structValues, prop) => { + let nestedQueryParameter; + if (providedType) { + nestedQueryParameter = BigQuery.valueToQueryParameter_( + value[prop], + (providedType as ProvidedTypeStruct)[prop] + ); + } else { + nestedQueryParameter = BigQuery.valueToQueryParameter_(value[prop]); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (structValues as any)[prop] = nestedQueryParameter.parameterValue; + return structValues; + }, + {} + ); + } else { + queryParameter.parameterValue!.value = BigQuery._getValue( + value, + parameterType + ); + } + + return queryParameter; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private static _getValue(value: any, type: ValueType): any { + if (value === null) { + return null; + } + if (value.type) type = value; + return BigQuery._isCustomType(type) ? value.value : value; + } + + private static _isCustomType({type}: ValueType): boolean { + return ( + type!.indexOf('TIME') > -1 || + type!.indexOf('DATE') > -1 || + type!.indexOf('GEOGRAPHY') > -1 || + type!.indexOf('BigQueryInt') > -1 + ); + } + + createDataset( + id: string, + options?: DatasetResource + ): Promise; + createDataset( + id: string, + options: DatasetResource, + callback: DatasetCallback + ): void; + createDataset(id: string, callback: DatasetCallback): void; + /** + * Create a dataset. + * + * @see [Datasets: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/insert} + * + * @param {string} id ID of the dataset to create. + * @param {object} [options] See a + * [Dataset + * resource](https://cloud.google.com/bigquery/docs/reference/v2/datasets#resource). + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request + * @param {Dataset} callback.dataset The newly created dataset + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * bigquery.createDataset('my-dataset', function(err, dataset, apiResponse) + * {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bigquery.createDataset('my-dataset').then(function(data) { + * const dataset = data[0]; + * const apiResponse = data[1]; + * }); + */ + createDataset( + id: string, + optionsOrCallback?: DatasetResource | DatasetCallback, + cb?: DatasetCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + this.request( + { + method: 'POST', + uri: '/datasets', + json: extend( + true, + { + location: this.location, + }, + options, + { + datasetReference: { + datasetId: id, + }, + } + ), + }, + (err, resp) => { + if (err) { + callback!(err, null, resp); + return; + } + + const dataset = this.dataset(id); + dataset.metadata = resp; + + callback!(null, dataset, resp); + } + ); + } + + createQueryJob(options: Query | string): Promise; + createQueryJob(options: Query | string, callback: JobCallback): void; + /** + * Run a query as a job. No results are immediately returned. Instead, your + * callback will be executed with a {@link Job} object that you must + * ping for the results. See the Job documentation for explanations of how to + * check on the status of the job. + * + * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} + * + * @param {object|string} options The configuration object. This must be in + * the format of the [`configuration.query`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery) + * property of a Jobs resource. If a string is provided, this is used as the + * query string, and all other options are defaulted. + * @param {Table} [options.destination] The table to save the + * query's results to. If omitted, a new table will be created. + * @param {boolean} [options.dryRun] If set, don't actually run this job. A + * valid query will update the job with processing statistics. These can + * be accessed via `job.metadata`. + * @param {object} [options.labels] String key/value pairs to be attached as + * labels to the newly created Job. + * @param {string} [options.location] The geographic location of the job. + * Required except for US and EU. + * @param {number} [options.jobTimeoutMs] Job timeout in milliseconds. + * If this time limit is exceeded, BigQuery might attempt to stop the job. + * @param {string} [options.jobId] Custom job id. + * @param {string} [options.jobPrefix] Prefix to apply to the job id. + * @param {string} options.query A query string, following the BigQuery query + * syntax, of the query to execute. + * @param {boolean} [options.useLegacySql=false] Option to use legacy sql syntax. + * @param {object} [options.defaultDataset] The dataset. This must be in + * the format of the [`DatasetReference`](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#DatasetReference) + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request. + * @param {Job} callback.job The newly created job for your query. + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @throws {Error} If a query is not specified. + * @throws {Error} If a Table is not provided as a destination. 
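To complement the callback-style examples that follow, here is a hedged async/await sketch of the same `createQueryJob` flow (a promise is returned when the callback is omitted); the sample query matches the one used in the examples below.

```js
// Hedged sketch: createQueryJob with async/await instead of callbacks.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function runQueryJob() {
  const [job] = await bigquery.createQueryJob(
    'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100'
  );
  // Poll the job for its results once it completes.
  const [rows] = await job.getQueryResults();
  return rows;
}
```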
+ * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT + * 100'; + * + * //- + * // You may pass only a query string, having a new table created to store + * the + * // results of the query. + * //- + * bigquery.createQueryJob(query, function(err, job) {}); + * + * //- + * // You can also control the destination table by providing a + * // {@link Table} object. + * //- + * bigquery.createQueryJob({ + * destination: bigquery.dataset('higher_education').table('institutions'), + * query: query + * }, function(err, job) {}); + * + * //- + * // After you have run `createQueryJob`, your query will execute in a job. + * Your + * // callback is executed with a {@link Job} object so that you may + * // check for the results. + * //- + * bigquery.createQueryJob(query, function(err, job) { + * if (!err) { + * job.getQueryResults(function(err, rows, apiResponse) {}); + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bigquery.createQueryJob(query).then(function(data) { + * const job = data[0]; + * const apiResponse = data[1]; + * + * return job.getQueryResults(); + * }); + */ + createQueryJob( + opts: Query | string, + callback?: JobCallback + ): void | Promise { + const options = typeof opts === 'object' ? opts : {query: opts}; + if ((!options || !options.query) && !options.pageToken) { + throw new Error('A SQL query string is required.'); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const query: any = extend( + true, + { + useLegacySql: false, + }, + options + ); + + if (options.destination) { + if (!(options.destination instanceof Table)) { + throw new Error('Destination must be a Table object.'); + } + + query.destinationTable = { + datasetId: options.destination.dataset.id, + projectId: options.destination.dataset.bigQuery.projectId, + tableId: options.destination.id, + }; + + delete query.destination; + } + + if (query.params) { + query.parameterMode = is.array(query.params) ? 
'positional' : 'named'; + + if (query.parameterMode === 'named') { + query.queryParameters = []; + + // tslint:disable-next-line forin + for (const namedParameter in query.params) { + const value = query.params[namedParameter]; + let queryParameter; + + if (query.types) { + if (!is.object(query.types)) { + throw new Error( + 'Provided types must match the value type passed to `params`' + ); + } + + if (query.types[namedParameter]) { + queryParameter = BigQuery.valueToQueryParameter_( + value, + query.types[namedParameter] + ); + } else { + queryParameter = BigQuery.valueToQueryParameter_(value); + } + } else { + queryParameter = BigQuery.valueToQueryParameter_(value); + } + + queryParameter.name = namedParameter; + query.queryParameters.push(queryParameter); + } + } else { + query.queryParameters = []; + + if (query.types) { + if (!is.array(query.types)) { + throw new Error( + 'Provided types must match the value type passed to `params`' + ); + } + + if (query.params.length !== query.types.length) { + throw new Error('Incorrect number of parameter types provided.'); + } + query.params.forEach((value: {}, i: number) => { + const queryParameter = BigQuery.valueToQueryParameter_( + value, + query.types[i] + ); + query.queryParameters.push(queryParameter); + }); + } else { + query.params.forEach((value: {}) => { + const queryParameter = BigQuery.valueToQueryParameter_(value); + query.queryParameters.push(queryParameter); + }); + } + } + delete query.params; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const reqOpts: any = { + configuration: { + query, + }, + }; + + if (typeof query.jobTimeoutMs === 'number') { + reqOpts.configuration.jobTimeoutMs = query.jobTimeoutMs; + delete query.jobTimeoutMs; + } + + if (query.dryRun) { + reqOpts.configuration.dryRun = query.dryRun; + delete query.dryRun; + } + + if (query.labels) { + reqOpts.configuration.labels = query.labels; + delete query.labels; + } + + if (query.jobPrefix) { + reqOpts.jobPrefix = query.jobPrefix; + delete query.jobPrefix; + } + + if (query.location) { + reqOpts.location = query.location; + delete query.location; + } + + if (query.jobId) { + reqOpts.jobId = query.jobId; + delete query.jobId; + } + + this.createJob(reqOpts, callback!); + } + + createJob(options: JobOptions): Promise; + createJob(options: JobOptions, callback: JobCallback): void; + /** + * Creates a job. Typically when creating a job you'll have a very specific + * task in mind. For this we recommend one of the following methods: + * + * - {@link BigQuery#createQueryJob} + * - {@link Table#createCopyJob} + * - {@link Table#createCopyFromJob} + * - {@link Table#createExtractJob} + * - {@link Table#createLoadJob} + * + * However in the event you need a finer level of control over the job + * creation, you can use this method to pass in a raw [Job + * resource](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs) + * object. + * + * @see [Jobs Overview]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs} + * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} + * + * @param {object} options Object in the form of a [Job resource](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs); + * @param {string} [options.jobId] Custom job id. + * @param {string} [options.jobPrefix] Prefix to apply to the job id. + * @param {string} [options.location] The geographic location of the job. + * Required except for US and EU. 
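A hedged sketch (not part of the patch) of the named-parameter `types` handling implemented in `createQueryJob` above: when `types` is an object, each entry overrides type detection for the matching named parameter, which is how `null` values can be typed. The table name here is hypothetical.

```js
// Hedged sketch: explicit types for named parameters (table name is made up).
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

bigquery.createQueryJob({
  query:
    'SELECT name FROM `my_dataset.my_table` ' +
    'WHERE owner = @owner AND note = @note',
  params: {owner: 'google', note: null},
  types: {owner: 'STRING', note: 'STRING'}, // needed because `note` is null
}, (err, job) => {
  if (!err) job.getQueryResults((err, rows) => {});
});
```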
+ * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request. + * @param {Job} callback.job The newly created job. + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * const options = { + * configuration: { + * query: { + * query: 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100' + * } + * } + * }; + * + * bigquery.createJob(options, function(err, job) { + * if (err) { + * // Error handling omitted. + * } + * + * job.getQueryResults(function(err, rows) {}); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bigquery.createJob(options).then(function(data) { + * const job = data[0]; + * + * return job.getQueryResults(); + * }); + */ + createJob( + options: JobOptions, + callback?: JobCallback + ): void | Promise { + const JOB_ID_PROVIDED = typeof options.jobId !== 'undefined'; + + const reqOpts = Object.assign({}, options); + let jobId = JOB_ID_PROVIDED ? reqOpts.jobId : uuid.v4(); + + if (reqOpts.jobId) { + delete reqOpts.jobId; + } + + if (reqOpts.jobPrefix) { + jobId = reqOpts.jobPrefix + jobId; + delete reqOpts.jobPrefix; + } + + reqOpts.jobReference = { + projectId: this.projectId, + jobId, + location: this.location, + }; + + if (options.location) { + reqOpts.jobReference.location = options.location; + delete reqOpts.location; + } + + const job = this.job(jobId!, { + location: reqOpts.jobReference.location, + }); + + this.request( + { + method: 'POST', + uri: '/jobs', + json: reqOpts, + }, + async (err, resp) => { + const ALREADY_EXISTS_CODE = 409; + + if (err) { + if ( + (err as common.ApiError).code === ALREADY_EXISTS_CODE && + !JOB_ID_PROVIDED + ) { + // The last insert attempt flaked, but the API still processed the + // request and created the job. Because of our "autoRetry" feature, + // we tried the request again, which tried to create it again, + // unnecessarily. We will get the job's metadata and treat it as if + // it just came back from the create call. + err = null; + [resp] = await job.getMetadata(); + } else { + callback!(err, null, resp); + return; + } + } + + if (resp.status.errors) { + err = new common.util.ApiError({ + errors: resp.status.errors, + response: resp, + } as GoogleErrorBody); + } + + // Update the location with the one used by the API. + job.location = resp.jobReference.location; + job.metadata = resp; + callback!(err, job, resp); + } + ); + } + + /** + * Create a reference to a dataset. + * + * @param {string} id ID of the dataset. + * @param {object} [options] Dataset options. + * @param {string} [options.location] The geographic location of the dataset. + * Required except for US and EU. 
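A hedged sketch of the location fallback used by `dataset()` (and `job()`) as implemented below: an explicit `options.location` wins, otherwise the client's default location is applied. That the `BigQuery` constructor accepts a `location` option is an assumption here, implied by the `this.location` references in this file.

```js
// Hedged sketch; assumes the client accepts a default `location` option.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery({location: 'asia-northeast1'});

const inherited = bigquery.dataset('my_dataset');                  // uses 'asia-northeast1'
const explicit = bigquery.dataset('my_dataset', {location: 'US'}); // explicit option wins
```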
+ * @returns {Dataset} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('higher_education'); + */ + dataset(id: string, options?: DatasetOptions) { + if (typeof id !== 'string') { + throw new TypeError('A dataset ID is required.'); + } + + if (this.location) { + options = extend({location: this.location}, options); + } + return new Dataset(this, id, options); + } + + getDatasets(options?: GetDatasetsOptions): Promise; + getDatasets(options: GetDatasetsOptions, callback: DatasetsCallback): void; + getDatasets(callback: DatasetsCallback): void; + /** + * List all or some of the datasets in your project. + * + * @see [Datasets: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/list} + * + * @param {object} [options] Configuration object. + * @param {boolean} [options.all] List all datasets, including hidden ones. + * @param {boolean} [options.autoPaginate] Have pagination handled automatically. + * Default: true. + * @param {number} [options.maxApiCalls] Maximum number of API calls to make. + * @param {number} [options.maxResults] Maximum number of results to return. + * @param {string} [options.pageToken] Token returned from a previous call, to + * request the next page of results. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request + * @param {Dataset[]} callback.datasets The list of datasets in your project. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * bigquery.getDatasets(function(err, datasets) { + * if (!err) { + * // datasets is an array of Dataset objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * function manualPaginationCallback(err, datasets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bigquery.getDatasets(nextQuery, manualPaginationCallback); + * } + * } + * + * bigquery.getDatasets({ + * autoPaginate: false + * }, manualPaginationCallback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bigquery.getDatasets().then(function(datasets) {}); + */ + getDatasets( + optionsOrCallback?: GetDatasetsOptions | DatasetsCallback, + cb?: DatasetsCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; + + this.request( + { + uri: '/datasets', + qs: options, + }, + (err, resp) => { + if (err) { + callback!(err, null, null, resp); + return; + } + + let nextQuery: GetDatasetsOptions | null = null; + + if (resp.nextPageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.nextPageToken, + }); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const datasets = (resp.datasets || []).map( + (dataset: bigquery.IDataset) => { + const ds = this.dataset(dataset.datasetReference!.datasetId!, { + location: dataset.location!, + }); + + ds.metadata = dataset!; + return ds; + } + ); + + callback!(null, datasets, nextQuery, resp); + } + ); + } + + getJobs(options?: GetJobsOptions): Promise; + getJobs(options: GetJobsOptions, callback: GetJobsCallback): void; + getJobs(callback: GetJobsCallback): void; + /** + * Get all of the jobs from your project. + * + * @see [Jobs: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/list} + * + * @param {object} [options] Configuration object. + * @param {boolean} [options.allUsers] Display jobs owned by all users in the + * project. + * @param {boolean} [options.autoPaginate] Have pagination handled + * automatically. Default: true. + * @param {number} [options.maxApiCalls] Maximum number of API calls to make. + * @param {number} [options.maxResults] Maximum number of results to return. + * @param {string} [options.pageToken] Token returned from a previous call, to + * request the next page of results. + * @param {string} [options.projection] Restrict information returned to a set + * of selected fields. Acceptable values are "full", for all job data, and + * "minimal", to not include the job configuration. + * @param {string} [options.stateFilter] Filter for job state. Acceptable + * values are "done", "pending", and "running". Sending an array to this + * option performs a disjunction. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request + * @param {Job[]} callback.jobs The list of jobs in your + * project. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * bigquery.getJobs(function(err, jobs) { + * if (!err) { + * // jobs is an array of Job objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * function manualPaginationCallback(err, jobs, nextQuery, apiRespose) { + * if (nextQuery) { + * // More results exist. + * bigquery.getJobs(nextQuery, manualPaginationCallback); + * } + * } + * + * bigquery.getJobs({ + * autoPaginate: false + * }, manualPaginationCallback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bigquery.getJobs().then(function(data) { + * const jobs = data[0]; + * }); + */ + getJobs( + optionsOrCallback?: GetJobsOptions | GetJobsCallback, + cb?: GetJobsCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; + this.request( + { + uri: '/jobs', + qs: options, + useQuerystring: true, + }, + (err, resp) => { + if (err) { + callback!(err, null, null, resp); + return; + } + let nextQuery: {} | null = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.nextPageToken, + }); + } + const jobs = (resp.jobs || []).map((jobObject: bigquery.IJob) => { + const job = this.job(jobObject.jobReference!.jobId!, { + location: jobObject.jobReference!.location!, + }); + job.metadata = jobObject!; + return job; + }); + callback!(null, jobs, nextQuery, resp); + } + ); + } + + /** + * Create a reference to an existing job. + * + * @param {string} id ID of the job. + * @param {object} [options] Configuration object. + * @param {string} [options.location] The geographic location of the job. + * Required except for US and EU. + * @returns {Job} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * const myExistingJob = bigquery.job('job-id'); + */ + job(id: string, options?: JobOptions) { + if (this.location) { + options = extend({location: this.location}, options); + } + return new Job(this, id, options); + } + + query(query: string, options?: QueryOptions): Promise; + query(query: Query, options?: QueryOptions): Promise; + query( + query: string, + options: QueryOptions, + callback?: QueryRowsCallback + ): void; + query( + query: Query, + options: QueryOptions, + callback?: SimpleQueryRowsCallback + ): void; + query(query: string, callback?: QueryRowsCallback): void; + query(query: Query, callback?: SimpleQueryRowsCallback): void; + /** + * Run a query scoped to your project. For manual pagination please refer to + * {@link BigQuery#createQueryJob}. + * + * @see [Jobs: query API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/query} + * + * @param {string|object} query A string SQL query or configuration object. + * For all available options, see + * [Jobs: query request + * body](https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#request-body). + * @param {string} [query.location] The geographic location of the job. + * Required except for US and EU. + * @param {string} [query.jobId] Custom id for the underlying job. + * @param {string} [query.jobPrefix] Prefix to apply to the underlying job id. + * @param {object|Array<*>} query.params For positional SQL parameters, provide + * an array of values. For named SQL parameters, provide an object which + * maps each named parameter to its value. The supported types are + * integers, floats, {@link BigQuery#date} objects, {@link BigQuery#datetime} + * objects, {@link BigQuery#time} objects, {@link BigQuery#timestamp} + * objects, Strings, Booleans, and Objects. + * @param {string} query.query A query string, following the BigQuery query + * syntax, of the query to execute. + * @param {object|Array<*>} query.types Provided types for query parameters. + * For positional SQL parameters, provide an array of types. For named + * SQL parameters, provide an object which maps each named parameter to + * its type. + * @param {boolean} [query.useLegacySql=false] Option to use legacy sql syntax. + * @param {object} [options] Configuration object for query results. + * @param {number} [options.maxResults] Maximum number of results to read. + * @param {number} [options.timeoutMs] How long to wait for the query to + * complete, in milliseconds, before returning. Default is 10 seconds. 
+ * If the timeout passes before the job completes, an error will be returned + * and the 'jobComplete' field in the response will be false. + * @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers=false] Wrap values + * of 'INT64' type in {@link BigQueryInt} objects. + * If a `boolean`, this will wrap values in {@link BigQueryInt} objects. + * If an `object`, this will return a value returned by + * `wrapIntegers.integerTypeCastFunction`. + * Please see {@link IntegerTypeCastOptions} for options descriptions. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request + * @param {array} callback.rows The list of results from your query. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * + * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT + * 100'; + * + * bigquery.query(query, function(err, rows) { + * if (!err) { + * // rows is an array of results. + * } + * }); + * + * //- + * // Positional SQL parameters are supported. + * //- + * bigquery.query({ + * query: [ + * 'SELECT url', + * 'FROM `publicdata.samples.github_nested`', + * 'WHERE repository.owner = ?' + * ].join(' '), + * + * params: [ + * 'google' + * ] + * }, function(err, rows) {}); + * + * //- + * // Or if you prefer to name them, that's also supported. + * //- + * bigquery.query({ + * query: [ + * 'SELECT url', + * 'FROM `publicdata.samples.github_nested`', + * 'WHERE repository.owner = @owner' + * ].join(' '), + * params: { + * owner: 'google' + * } + * }, function(err, rows) {}); + * + * //- + * // Providing types for SQL parameters is supported. + * //- + * bigquery.query({ + * query: [ + * 'SELECT url', + * 'FROM `publicdata.samples.github_nested`', + * 'WHERE repository.owner = ?' + * ].join(' '), + * + * params: [ + * null + * ], + * + * types: ['string'] + * }, function(err, rows) {}); + * + * //- + * // If you need to use a `DATE`, `DATETIME`, `TIME`, or `TIMESTAMP` type in + * // your query, see {@link BigQuery#date}, {@link BigQuery#datetime}, + * // {@link BigQuery#time}, and {@link BigQuery#timestamp}. + * //- + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bigquery.query(query).then(function(data) { + * const rows = data[0]; + * }); + */ + query( + query: string | Query, + optionsOrCallback?: + | QueryOptions + | SimpleQueryRowsCallback + | QueryRowsCallback, + cb?: SimpleQueryRowsCallback | QueryRowsCallback + ): void | Promise | Promise { + let options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.createQueryJob(query, (err, job, resp) => { + if (err) { + (callback as SimpleQueryRowsCallback)(err, null, resp); + return; + } + if (typeof query === 'object' && query.dryRun) { + (callback as SimpleQueryRowsCallback)(null, [], resp); + return; + } + // The Job is important for the `queryAsStream_` method, so a new query + // isn't created each time results are polled for. + options = extend({job}, options); + job!.getQueryResults(options, callback as QueryRowsCallback); + }); + } + + /** + * This method will be called by `createQueryStream()`. It is required to + * properly set the `autoPaginate` option value. 
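A hedged sketch (not part of the patch) of the `wrapIntegers` option documented for `query()` above, using Node's built-in `BigInt` as the `integerTypeCastFunction` so an INT64 value larger than `Number.MAX_SAFE_INTEGER` survives intact.

```js
// Hedged sketch: preserving large INT64 results with wrapIntegers.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

bigquery.query(
  {query: 'SELECT 9007199254740993 AS big_value'},
  {wrapIntegers: {integerTypeCastFunction: value => BigInt(value)}},
  (err, rows) => {
    // big_value is whatever the cast function returned (a BigInt here).
    if (!err) console.log(rows[0].big_value);
  }
);
```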
+ * + * @private + */ + queryAsStream_( + query: Query, + optionsOrCallback?: QueryStreamOptions, + cb?: SimpleQueryRowsCallback + ) { + let options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + options = query.job + ? extend(query, options) + : extend(options, {autoPaginate: false}); + + if (query.job) { + query.job!.getQueryResults(options, callback as QueryRowsCallback); + return; + } + + this.query(query, options, callback); + } +} + +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator.extend(BigQuery, ['getDatasets', 'getJobs']); + +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(BigQuery, { + exclude: [ + 'dataset', + 'date', + 'datetime', + 'geography', + 'int', + 'job', + 'time', + 'timestamp', + ], +}); + +/** + * Date class for BigQuery. + */ +export class BigQueryDate { + value: string; + constructor(value: BigQueryDateOptions | string) { + if (typeof value === 'object') { + value = BigQuery.datetime(value).value; + } + this.value = value; + } +} + +/** + * Geography class for BigQuery. + */ +export class Geography { + value: string; + constructor(value: string) { + this.value = value; + } +} + +/** + * Timestamp class for BigQuery. + */ +export class BigQueryTimestamp { + value: string; + constructor(value: Date | string) { + this.value = new Date(value).toJSON(); + } +} + +/** + * Datetime class for BigQuery. + */ +export class BigQueryDatetime { + value: string; + constructor(value: BigQueryDatetimeOptions | string) { + if (typeof value === 'object') { + let time; + if (value.hours) { + time = BigQuery.time(value).value; + } + const y = value.year; + const m = value.month; + const d = value.day; + time = time ? ' ' + time : ''; + value = `${y}-${m}-${d}${time}`; + } else { + value = value.replace(/^(.*)T(.*)Z$/, '$1 $2'); + } + this.value = value as string; + } +} + +/** + * Time class for BigQuery. + */ +export class BigQueryTime { + value: string; + constructor(value: BigQueryTimeOptions | string) { + if (typeof value === 'object') { + const h = value.hours; + const m = value.minutes || 0; + const s = value.seconds || 0; + const f = is.defined(value.fractional) ? '.' + value.fractional : ''; + value = `${h}:${m}:${s}${f}`; + } + this.value = value as string; + } +} + +/** + * Build a BigQueryInt object. For long integers, a string can be provided. + * + * @class + * @param {string|number|IntegerTypeCastValue} value The 'INT64' value. + * @param {object} [typeCastOptions] Configuration to convert + * values of 'INT64' type to a custom value. Must provide an + * `integerTypeCastFunction` to handle conversion. + * @param {function} typeCastOptions.integerTypeCastFunction A custom user + * provided function to convert value. + * @param {string|string[]} [typeCastOptions.fields] Schema field + * names to be converted using `integerTypeCastFunction`. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const anInt = bigquery.int(7); + */ +export class BigQueryInt extends Number { + type: string; + value: string; + typeCastFunction?: Function; + private _schemaFieldName: string | undefined; + constructor( + value: string | number | IntegerTypeCastValue, + typeCastOptions?: IntegerTypeCastOptions + ) { + super(typeof value === 'object' ? 
value.integerValue : value); + this._schemaFieldName = + typeof value === 'object' ? value.schemaFieldName : undefined; + this.value = + typeof value === 'object' + ? value.integerValue.toString() + : value.toString(); + + this.type = 'BigQueryInt'; + + if (typeCastOptions) { + if (typeof typeCastOptions.integerTypeCastFunction !== 'function') { + throw new Error( + 'integerTypeCastFunction is not a function or was not provided.' + ); + } + + const typeCastFields = typeCastOptions.fields + ? arrify(typeCastOptions.fields) + : undefined; + + let customCast = true; + + if (typeCastFields) { + customCast = this._schemaFieldName + ? typeCastFields.includes(this._schemaFieldName) + ? true + : false + : false; + } + + customCast && + (this.typeCastFunction = typeCastOptions.integerTypeCastFunction); + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + valueOf(): any { + const shouldCustomCast = this.typeCastFunction ? true : false; + + if (shouldCustomCast) { + try { + return this.typeCastFunction!(this.value); + } catch (error) { + error.message = `integerTypeCastFunction threw an error:\n\n - ${error.message}`; + throw error; + } + } else { + // return this.value; + return BigQuery.decodeIntegerValue_({ + integerValue: this.value, + schemaFieldName: this._schemaFieldName, + }); + } + } + + toJSON(): Json { + return {type: this.type, value: this.value}; + } +} + +export interface Json { + [field: string]: string; +} diff --git a/src/dataset.ts b/src/dataset.ts index f937450c..d526b6db 100644 --- a/src/dataset.ts +++ b/src/dataset.ts @@ -14,44 +14,91 @@ * limitations under the License. */ -import {DecorateRequestOptions, DeleteCallback, ServiceObject} from '@google-cloud/common'; -import {paginator} from '@google-cloud/paginator'; +import { + DecorateRequestOptions, + DeleteCallback, + Metadata, + ServiceObject, +} from '@google-cloud/common'; +import {paginator, ResourceStream} from '@google-cloud/paginator'; import {promisifyAll} from '@google-cloud/promisify'; import * as extend from 'extend'; -import * as r from 'request'; -import {Duplex, Readable} from 'stream'; -import {teenyRequest} from 'teeny-request'; +import {Duplex} from 'stream'; -import {BigQuery, DatasetCallback, Query, QueryRowsResponse, SimpleQueryRowsCallback} from '.'; -import {JobCallback, JobResponse, Table, TableMetadata, TableOptions} from './table'; +import { + BigQuery, + DatasetCallback, + PagedCallback, + PagedRequest, + PagedResponse, + Query, + QueryRowsResponse, + ResourceCallback, + SimpleQueryRowsCallback, +} from './bigquery'; +import { + JobCallback, + JobResponse, + Table, + TableMetadata, + TableOptions, +} from './table'; +import {Model} from './model'; +import {Routine} from './routine'; +import bigquery from './types'; export interface DatasetDeleteOptions { force?: boolean; } -export interface DataSetOptions { +export interface DatasetOptions { location?: string; } -export interface CreateDatasetOptions {} +export type CreateDatasetOptions = bigquery.IDataset; -export interface GetTablesOptions { - autoPaginate?: boolean; - maxApiCalls?: number; - maxResults?: number; - pageToken?: string; -} +export type GetModelsOptions = PagedRequest; +export type GetModelsResponse = PagedResponse< + Model, + GetModelsOptions, + bigquery.IListModelsResponse +>; +export type GetModelsCallback = PagedCallback< + Model, + GetModelsOptions, + bigquery.IListModelsResponse +>; -export type GetTablesResponse = [Table[], r.Response]; -export interface GetTablesCallback { - (err: Error|null, tables?: 
Table[]|null, nextQuery?: {}|null, - apiResponse?: r.Response): void; -} +export type GetRoutinesOptions = PagedRequest; +export type GetRoutinesResponse = PagedResponse< + Routine, + GetRoutinesOptions, + bigquery.IListRoutinesResponse +>; +export type GetRoutinesCallback = PagedCallback< + Routine, + GetRoutinesOptions, + bigquery.IListRoutinesResponse +>; -export type TableResponse = [Table, r.Response]; -export interface TableCallback { - (err: Error|null, table?: Table|null, apiResponse?: r.Response): void; -} +export type GetTablesOptions = PagedRequest; +export type GetTablesResponse = PagedResponse< + Table, + GetTablesOptions, + bigquery.ITableList +>; +export type GetTablesCallback = PagedCallback< + Table, + GetTablesOptions, + bigquery.ITableList +>; + +export type RoutineMetadata = bigquery.IRoutine; +export type RoutineResponse = [Routine, bigquery.IRoutine]; +export type RoutineCallback = ResourceCallback; + +export type TableResponse = [Table, bigquery.ITable]; +export type TableCallback = ResourceCallback; /** * Interact with your BigQuery dataset. Create a Dataset instance with @@ -72,8 +119,10 @@ export interface TableCallback { class Dataset extends ServiceObject { bigQuery: BigQuery; location?: string; - getTablesStream: () => Readable; - constructor(bigQuery: BigQuery, id: string, options?: DataSetOptions) { + getModelsStream: (options?: GetModelsOptions) => ResourceStream; + getRoutinesStream: (options?: GetRoutinesOptions) => ResourceStream; + getTablesStream: (options?: GetTablesOptions) => ResourceStream; + constructor(bigQuery: BigQuery, id: string, options?: DatasetOptions) { const methods = { /** * Create a dataset. @@ -237,18 +286,20 @@ class Dataset extends ServiceObject { baseUrl: '/datasets', id, methods, - requestModule: teenyRequest as typeof r, - createMethod: - (id: string, optionsOrCallback?: CreateDatasetOptions|DatasetCallback, - cb?: DatasetCallback) => { - let options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const callback = typeof optionsOrCallback === 'function' ? - optionsOrCallback as DatasetCallback : - cb; - options = extend({}, options, {location: this.location}); - return bigQuery.createDataset(id, options, callback!); - } + createMethod: ( + id: string, + optionsOrCallback?: CreateDatasetOptions | DatasetCallback, + cb?: DatasetCallback + ) => { + let options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' + ? (optionsOrCallback as DatasetCallback) + : cb; + options = extend({}, options, {location: this.location}); + return bigQuery.createDataset(id, options, callback!); + }, }); if (options && options.location) { @@ -269,6 +320,62 @@ class Dataset extends ServiceObject { }, }); + /** + * List all or some of the {module:bigquery/model} objects in your project + * as a readable object stream. + * + * @param {object} [options] Configuration object. See + * {@link Dataset#getModels} for a complete list of options. 
+ * @return {stream} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('institutions'); + * + * dataset.getModelsStream() + * .on('error', console.error) + * .on('data', (model) => {}) + * .on('end', () => { + * // All models have been retrieved + * }); + * + * @example + * dataset.getModelsStream() + * .on('data', function(model) { + * this.end(); + * }); + */ + this.getModelsStream = paginator.streamify('getModels'); + + /** + * List all or some of the {@link Routine} objects in your project as a + * readable object stream. + * + * @method Dataset#getRoutinesStream + * @param {GetRoutinesOptions} [options] Configuration object. + * @returns {stream} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('institutions'); + * + * dataset.getRoutinesStream() + * .on('error', console.error) + * .on('data', (routine) => {}) + * .on('end', () => { + * // All routines have been retrieved + * }); + * + * @example + * dataset.getRoutinesStream() + * .on('data', function(routine) { + * this.end(); + * }); + */ + this.getRoutinesStream = paginator.streamify('getRoutines'); + /** * List all or some of the {module:bigquery/table} objects in your project * as a readable object stream. @@ -298,11 +405,11 @@ class Dataset extends ServiceObject { * this.end(); * }); */ - this.getTablesStream = paginator.streamify('getTables'); + this.getTablesStream = paginator.streamify
('getTables'); } - createQueryJob(options: string|Query): Promise; - createQueryJob(options: string|Query, callback: JobCallback): void; + createQueryJob(options: string | Query): Promise; + createQueryJob(options: string | Query, callback: JobCallback): void; /** * Run a query as a job. No results are immediately returned. Instead, your * callback will be executed with a {@link Job} object that you must @@ -315,8 +422,10 @@ class Dataset extends ServiceObject { * @param {function} [callback] See {@link BigQuery#createQueryJob} for full documentation of this method. * @returns {Promise} See {@link BigQuery#createQueryJob} for full documentation of this method. */ - createQueryJob(options: string|Query, callback?: JobCallback): - void|Promise { + createQueryJob( + options: string | Query, + callback?: JobCallback + ): void | Promise { if (typeof options === 'string') { options = { query: options, @@ -343,7 +452,7 @@ class Dataset extends ServiceObject { * documentation of this method. * @returns {stream} */ - createQueryStream(options: Query|string): Duplex { + createQueryStream(options: Query | string): Duplex { if (typeof options === 'string') { options = { query: options, @@ -359,9 +468,100 @@ class Dataset extends ServiceObject { return this.bigQuery.createQueryStream(options); } + createRoutine(id: string, config: RoutineMetadata): Promise; + createRoutine( + id: string, + config: RoutineMetadata, + callback: RoutineCallback + ): void; + /** + * @callback CreateRoutineCallback + * @param {?Error} err Request error, if any. + * @param {Routine} routine The newly created routine. + * @param {object} response The full API response body. + */ + /** + * @typedef {array} CreateRoutineResponse + * @property {Routine} 0 The newly created routine. + * @property {object} 1 The full API response body. + */ + /** + * Create a routine. + * + * @see [Routines: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert} + * + * @param {string} id The routine ID. + * @param {object} config A [routine resource]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine}. + * @param {CreateRoutineCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * + * const id = 'my-routine'; + * const config = { + * arguments: [{ + * name: 'x', + * dataType: { + * typeKind: 'INT64' + * } + * }], + * definitionBody: 'x * 3', + * routineType: 'SCALAR_FUNCTION', + * returnType: { + * typeKind: 'INT64' + * } + * }; + * + * dataset.createRoutine(id, config, (err, routine, apiResponse) => { + * if (!err) { + * // The routine was created successfully. 
+ * } + * }); + * + * @example + * const [routine, apiResponse] = await dataset.createRoutine(id, config); + */ + createRoutine( + id: string, + config: RoutineMetadata, + callback?: RoutineCallback + ): void | Promise { + const json = Object.assign({}, config, { + routineReference: { + routineId: id, + datasetId: this.id, + projectId: this.bigQuery.projectId, + }, + }); + + this.request( + { + method: 'POST', + uri: '/routines', + json, + }, + (err, resp) => { + if (err) { + callback!(err, null, resp); + return; + } + + const routine = this.routine(resp.routineReference.routineId); + routine.metadata = resp; + callback!(null, routine, resp); + } + ); + } + createTable(id: string, options: TableMetadata): Promise; - createTable(id: string, options: TableMetadata, callback: TableCallback): - void; + createTable( + id: string, + options: TableMetadata, + callback: TableCallback + ): void; createTable(id: string, callback: TableCallback): void; /** * Create a table given a tableId or configuration object. @@ -407,14 +607,16 @@ class Dataset extends ServiceObject { * }); */ createTable( - id: string, optionsOrCallback?: TableMetadata|TableCallback, - cb?: TableCallback): void|Promise { + id: string, + optionsOrCallback?: TableMetadata | TableCallback, + cb?: TableCallback + ): void | Promise { const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; const body = Table.formatMetadata_(options as TableMetadata); - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any (body as any).tableReference = { datasetId: this.id, projectId: this.bigQuery.projectId, @@ -422,27 +624,28 @@ class Dataset extends ServiceObject { }; this.request( - { - method: 'POST', - uri: '/tables', - json: body, - }, - (err, resp) => { - if (err) { - callback!(err, null, resp); - return; - } - - const table = this.table(resp.tableReference.tableId, { - location: resp.location, - }); + { + method: 'POST', + uri: '/tables', + json: body, + }, + (err, resp) => { + if (err) { + callback!(err, null, resp); + return; + } - table.metadata = resp; - callback!(null, table, resp); + const table = this.table(resp.tableReference.tableId, { + location: resp.location, }); + + table.metadata = resp; + callback!(null, table, resp); + } + ); } - delete(options?: DatasetDeleteOptions): Promise<[r.Response]>; + delete(options?: DatasetDeleteOptions): Promise<[Metadata]>; delete(options: DatasetDeleteOptions, callback: DeleteCallback): void; delete(callback: DeleteCallback): void; /** @@ -480,26 +683,210 @@ class Dataset extends ServiceObject { * }); */ delete( - optionsOrCallback?: DeleteCallback|DatasetDeleteOptions, - callback?: DeleteCallback): void|Promise<[r.Response]> { + optionsOrCallback?: DeleteCallback | DatasetDeleteOptions, + callback?: DeleteCallback + ): void | Promise<[Metadata]> { const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; const query = { deleteContents: !!options.force, }; this.request( - { - method: 'DELETE', - uri: '', - qs: query, - }, - callback!); + { + method: 'DELETE', + uri: '', + qs: query, + }, + callback! + ); + } + + getModels(options?: GetModelsOptions): Promise; + getModels(options: GetModelsOptions, callback: GetModelsCallback): void; + getModels(callback: GetModelsCallback): void; + /** + * Get a list of models. + * + * @see [Models: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/list} + * + * @param {object} [options] Configuration object. + * @param {boolean} [options.autoPaginate=true] Have pagination handled + * automatically. + * @param {number} [options.maxApiCalls] Maximum number of API calls to make. + * @param {number} [options.maxResults] Maximum number of results to return. + * @param {string} [options.pageToken] Token returned from a previous call, to + * request the next page of results. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request + * @param {Model[]} callback.models The list of models from + * your Dataset. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('institutions'); + * + * dataset.getModels((err, models) => { + * // models is an array of `Model` objects. + * }); + * + * @example + * function manualPaginationCallback(err, models, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * dataset.getModels(nextQuery, manualPaginationCallback); + * } + * } + * + * dataset.getModels({ + * autoPaginate: false + * }, manualPaginationCallback); + * + * @example + * dataset.getModels().then((data) => { + * const models = data[0]; + * }); + */ + getModels( + optsOrCb?: GetModelsOptions | GetModelsCallback, + cb?: GetModelsCallback + ): void | Promise { + const options = typeof optsOrCb === 'object' ? optsOrCb : {}; + const callback = typeof optsOrCb === 'function' ? optsOrCb : cb; + + this.request( + { + uri: '/models', + qs: options, + }, + (err: null | Error, resp: bigquery.IListModelsResponse) => { + if (err) { + callback!(err, null, null, resp); + return; + } + + let nextQuery: {} | null = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.nextPageToken, + }); + } + + const models = (resp.models || []).map(modelObject => { + const model = this.model(modelObject.modelReference!.modelId!); + model.metadata = modelObject; + return model; + }); + + callback!(null, models, nextQuery, resp); + } + ); + } + + getRoutines(options?: GetRoutinesOptions): Promise; + getRoutines(options: GetRoutinesOptions, callback: GetRoutinesCallback): void; + getRoutines(callback: GetRoutinesCallback): void; + /** + * @typedef {object} GetRoutinesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of results to return. + * @property {string} [pageToken] Token returned from a previous call, to + * request the next page of results. + */ + /** + * @callback GetRoutinesCallback + * @param {?Error} err Request error, if any. + * @param {Routine[]} routines List of routine objects. 
+ * @param {GetRoutinesOptions} nextQuery If `autoPaginate` is set to true, + * this will be a prepared query for the next page of results. + * @param {object} response The full API response. + */ + /** + * @typedef {array} GetRoutinesResponse + * @property {Routine[]} 0 List of routine objects. + * @property {GetRoutinesOptions} 1 If `autoPaginate` is set to true, this + * will be a prepared query for the next page of results. + * @property {object} 2 The full API response. + */ + /** + * Get a list of routines. + * + * @see [Routines: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/list} + * + * @param {GetRoutinesOptions} [options] Request options. + * @param {GetRoutinesCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('institutions'); + * + * dataset.getRoutines((err, routines) => { + * // routines is an array of `Routine` objects. + * }); + * + * @example + * function manualPaginationCallback(err, routines, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * dataset.getRoutines(nextQuery, manualPaginationCallback); + * } + * } + * + * dataset.getRoutines({ + * autoPaginate: false + * }, manualPaginationCallback); + * + * @example + * const [routines] = await dataset.getRoutines(); + */ + getRoutines( + optsOrCb?: GetRoutinesOptions | GetRoutinesCallback, + cb?: GetRoutinesCallback + ): void | Promise { + const options = typeof optsOrCb === 'object' ? optsOrCb : {}; + const callback = typeof optsOrCb === 'function' ? optsOrCb : cb; + + this.request( + { + uri: '/routines', + qs: options, + }, + (err: Error | null, resp: bigquery.IListRoutinesResponse) => { + if (err) { + callback!(err, null, null, resp); + return; + } + + let nextQuery: {} | null = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.nextPageToken, + }); + } + + const routines = (resp.routines || []).map(metadata => { + const routine = this.routine(metadata.routineReference!.routineId!); + routine.metadata = metadata; + return routine; + }); + + callback!(null, routines, nextQuery, resp); + } + ); } + getTables(options?: GetTablesOptions): Promise; + getTables(options: GetTablesOptions, callback: GetTablesCallback): void; + getTables(callback: GetTablesCallback): void; /** * Get a list of tables. * @@ -548,45 +935,67 @@ class Dataset extends ServiceObject { * const tables = data[0]; * }); */ - getTables(options?: GetTablesOptions): Promise; - getTables(options: GetTablesOptions, callback: GetTablesCallback): void; - getTables(callback: GetTablesCallback): void; getTables( - optionsOrCallback?: GetTablesOptions|GetTablesCallback, - cb?: GetTablesCallback): void|Promise { + optionsOrCallback?: GetTablesOptions | GetTablesCallback, + cb?: GetTablesCallback + ): void | Promise { const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; this.request( - { - uri: '/tables', - qs: options, - }, - (err, resp) => { - if (err) { - callback!(err, null, null, resp); - return; - } - - let nextQuery: {}|null = null; - if (resp.nextPageToken) { - nextQuery = extend({}, options, { - pageToken: resp.nextPageToken, - }); - } - - // tslint:disable-next-line no-any - const tables = (resp.tables || []).map((tableObject: any) => { - const table = this.table(tableObject.tableReference.tableId, { - location: tableObject.location, - }); - table.metadata = tableObject; - return table; + { + uri: '/tables', + qs: options, + }, + (err, resp) => { + if (err) { + callback!(err, null, null, resp); + return; + } + + let nextQuery: {} | null = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.nextPageToken, + }); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const tables = (resp.tables || []).map((tableObject: any) => { + const table = this.table(tableObject.tableReference.tableId, { + location: tableObject.location, }); - callback!(null, tables, nextQuery, resp); + table.metadata = tableObject; + return table; }); + callback!(null, tables, nextQuery, resp); + } + ); + } + + /** + * Create a {@link Model} object. + * + * @throws {TypeError} if model ID is missing. + * + * @param {string} id The ID of the model. + * @return {Model} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('institutions'); + * + * const model = dataset.model('my-model'); + */ + model(id: string): Model { + if (typeof id !== 'string') { + throw new TypeError('A model ID is required.'); + } + + return new Model(this, id); } /** @@ -600,8 +1009,10 @@ class Dataset extends ServiceObject { */ query(options: Query): Promise; query(options: Query, callback: SimpleQueryRowsCallback): void; - query(options: Query, callback?: SimpleQueryRowsCallback): - void|Promise { + query( + options: Query, + callback?: SimpleQueryRowsCallback + ): void | Promise { if (typeof options === 'string') { options = { query: options, @@ -618,6 +1029,29 @@ class Dataset extends ServiceObject { return this.bigQuery.query(options, callback); } + /** + * Create a Routine object. + * + * @throws {TypeError} if routine ID is missing. + * + * @param {string} id The ID of the routine. + * @returns {Routine} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('institutions'); + * + * const routine = dataset.routine('my_routine'); + */ + routine(id: string): Routine { + if (typeof id !== 'string') { + throw new TypeError('A routine ID is required.'); + } + + return new Routine(this, id); + } + /** * Create a Table object. * @@ -646,10 +1080,11 @@ class Dataset extends ServiceObject { } options = extend( - { - location: this.location, - }, - options); + { + location: this.location, + }, + options + ); return new Table(this, id, options); } } @@ -658,7 +1093,7 @@ class Dataset extends ServiceObject { * * These methods can be auto-paginated. */ -paginator.extend(Dataset, ['getTables']); +paginator.extend(Dataset, ['getModels', 'getRoutines', 'getTables']); /*! Developer Documentation * @@ -666,7 +1101,7 @@ paginator.extend(Dataset, ['getTables']); * that a callback is omitted. 
*/ promisifyAll(Dataset, { - exclude: ['table'], + exclude: ['model', 'routine', 'table'], }); /** diff --git a/src/index.ts b/src/index.ts index b8825bc5..f852cffa 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,5 +1,5 @@ /*! - * Copyright 2014 Google Inc. All Rights Reserved. + * Copyright 2019 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,1658 +14,122 @@ * limitations under the License. */ -import * as common from '@google-cloud/common'; -import {paginator} from '@google-cloud/paginator'; -import {promisifyAll} from '@google-cloud/promisify'; -import * as arrify from 'arrify'; -import {Big} from 'big.js'; -import * as extend from 'extend'; - -const format = require('string-format-obj'); -import * as is from 'is'; -import * as r from 'request'; -import * as uuid from 'uuid'; -import {teenyRequest} from 'teeny-request'; - -import {Dataset, DataSetOptions} from './dataset'; -import {Job, JobOptions} from './job'; -import {Table, TableField, TableSchema, TableRow, TableRowField, JobCallback, JobResponse, RowsCallback, RowsResponse, RowMetadata} from './table'; -import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; -import {Readable, Duplex} from 'stream'; - -// tslint:disable-next-line no-any -export type QueryRowsResponse = [any[], Query, r.Response]; -export interface QueryRowsCallback { - // tslint:disable-next-line no-any - (err: Error|null, rows?: any[]|null, nextQuery?: Query|null, - apiResponse?: r.Response): void; -} - -// tslint:disable-next-line no-any -export type SimpleQueryRowsResponse = [any[], r.Response]; -export interface SimpleQueryRowsCallback { - // tslint:disable-next-line no-any - (err: Error|null, rows?: any[]|null, apiResponse?: r.Response): void; -} - -export interface Query { - dryRun?: boolean; - location?: string; - job?: Job; - jobId?: string; - jobPrefix?: string; - // tslint:disable-next-line no-any - params?: any; - query?: string; - useLegacySql?: boolean; - maxResults?: number; - timeoutMs?: number; - pageToken?: string; - destination?: Table; - defaultDataset?: Dataset; -} - -export interface QueryOptions { - maxResults?: number; - timeoutMs?: number; - autoPaginate?: boolean; -} - -export interface DatasetResource { - etag?: string; - id?: string; - selfLink?: string; - datasetReference?: {datasetId?: string, projectId?: string}; - friendlyName?: string; - description?: string; - defaultTableExpirationMs?: number; - defaultPartitionExpirationMs?: number; - labels?: {[index: string]: string}; - access?: [{ - role?: string; - userByEmail?: string; - groupByEmail?: string; - domain?: string; - specialGroup?: string; - view?: {projectId?: string; datasetId?: string; tableId?: string;} - }]; - creationTime?: number; - lastModifiedTime?: number; - location?: string; -} - -export interface ValueType { - type: string; - arrayType?: ValueType; - structTypes?: Array<{name: string; type: ValueType;}>; -} - -export interface GetDatasetsOptions { - all?: boolean; - filter?: string; - autoPaginate?: boolean; - maxApiCalls?: number; - maxResults?: number; - pageToken?: string; -} - -export type DatasetsResponse = [Dataset[], GetDatasetsOptions, r.Response]; -export interface DatasetsCallback { - (err: Error|null, datasets?: Dataset[]|null, - nextQuery?: GetDatasetsOptions|null, apiResponse?: r.Response): void; -} - -export type DatasetResponse = [Dataset, r.Response]; -export interface DatasetCallback { - (err: Error|null, 
dataset?: Dataset|null, apiResponse?: r.Response): void; -} - -export interface GetJobsOptions { - allUsers?: boolean; - autoPaginate?: boolean; - maxApiCalls?: number; - maxResults?: number; - pageToken?: string; - projection?: 'full'|'minimal'; - stateFilter?: 'done'|'pending'|'running'; -} - -export type GetJobsResponse = [Job[], r.Response]; -export interface GetJobsCallback { - (err: Error|null, jobs: Job[]|null, nextQuery?: {}|null, - apiResponse?: r.Response): void; -} - -export interface BigQueryTimeOptions { - hours?: number|string; - minutes?: number|string; - seconds?: number|string; - fractional?: number|string; -} - -export interface BigQueryDateOptions { - year?: number|string; - month?: number|string; - day?: number|string; -} - -export interface BigQueryDatetimeOptions { - year?: string|number; - month?: string|number; - day?: string|number; - hours?: string|number; - minutes?: string|number; - seconds?: string|number; - fractional?: string|number; -} - -export interface QueryParameter { - name?: string; - parameterType: {type: string;}; - parameterValue: {arrayValues?: Array<{}>; structValues?: {}; value?: {}}; -} - -/** - * @typedef {object} BigQueryOptions - * @property {string} [projectId] The project ID from the Google Developer's - * Console, e.g. 'grape-spaceship-123'. We will also check the environment - * variable `GCLOUD_PROJECT` for your project ID. If your app is running in - * an environment which supports {@link - * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application - * Application Default Credentials}, your project ID will be detected - * automatically. - * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key - * downloaded from the Google Developers Console. If you provide a path to a - * JSON file, the `projectId` option above is not necessary. NOTE: .pem and - * .p12 require you to specify the `email` option as well. - * @property {string} [token] An OAUTH access token. If provided, we will not - * manage fetching, re-using, and re-minting access tokens. - * @property {string} [email] Account email address. Required when using a .pem - * or .p12 keyFilename. - * @property {object} [credentials] Credentials object. - * @property {string} [credentials.client_email] - * @property {string} [credentials.private_key] - * @property {boolean} [autoRetry=true] Automatically retry requests if the - * response is related to rate limits or certain intermittent server errors. - * We will exponentially backoff subsequent requests by default. - * @property {number} [maxRetries=3] Maximum number of automatic retries - * attempted before returning the error. - * @property {Constructor} [promise] Custom promise module to use instead of - * native Promises. - * @property {string} [location] The geographic location of all datasets and - * jobs referenced and created through the client. - * @property {string[]} [scopes] Additional OAuth scopes to use in requests. For - * example, to access an external data source, you may need the - * `https://www.googleapis.com/auth/drive.readonly` scope. - */ -export interface BigQueryOptions extends common.GoogleAuthOptions { - autoRetry?: boolean; - maxRetries?: number; - location?: string; -} - -/** - * In the following examples from this page and the other modules (`Dataset`, - * `Table`, etc.), we are going to be using a dataset from - * [data.gov](http://goo.gl/f2SXcb) of higher education institutions. 
- * - * We will create a table with the correct schema, import the public CSV file - * into that table, and query it for data. - * - * @class - * - * @see [What is BigQuery?]{@link https://cloud.google.com/bigquery/what-is-bigquery} - * - * @param {BigQueryOptions} options Constructor options. - * - * @example npm install --save - * @google-cloud/bigquery - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * - * @example const bigquery = new BigQuery(); - * - * @example const bigquery = new BigQuery({ projectId: - * 'your-project-id', keyFilename: '/path/to/keyfile.json' - * }); - * - * @example - * region_tag:bigquery_quickstart - * Full quickstart example: - */ -export class BigQuery extends common.Service { - location?: string; - - createQueryStream: (options?: Query|string) => Duplex; - getDatasetsStream: () => Readable; - getJobsStream: () => Readable; - - constructor(options?: BigQueryOptions) { - options = options || {}; - const config = { - baseUrl: 'https://www.googleapis.com/bigquery/v2', - scopes: ['https://www.googleapis.com/auth/bigquery'], - packageJson: require('../../package.json'), - requestModule: teenyRequest as typeof r, - }; - - if (options.scopes) { - config.scopes = config.scopes.concat(options.scopes); - } - - super(config, options); - - /** - * @name BigQuery#location - * @type {string} - */ - this.location = options.location; - /** - * Run a query scoped to your project as a readable object stream. - * - * @param {object} query Configuration object. See {@link Query} for a complete - * list of options. - * @returns {stream} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT - * 100'; - * - * bigquery.createQueryStream(query) - * .on('error', console.error) - * .on('data', function(row) { - * // row is a result from your query. - * }) - * .on('end', function() { - * // All rows retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * bigquery.createQueryStream(query) - * .on('data', function(row) { - * this.end(); - * }); - */ - this.createQueryStream = paginator.streamify('queryAsStream_'); - - /** - * List all or some of the {@link Dataset} objects in your project as - * a readable object stream. - * - * @param {object} [options] Configuration object. See - * {@link BigQuery#getDatasets} for a complete list of options. - * @returns {stream} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * bigquery.getDatasetsStream() - * .on('error', console.error) - * .on('data', function(dataset) { - * // dataset is a Dataset object. - * }) - * .on('end', function() { - * // All datasets retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * bigquery.getDatasetsStream() - * .on('data', function(dataset) { - * this.end(); - * }); - */ - this.getDatasetsStream = paginator.streamify('getDatasets'); - - /** - * List all or some of the {@link Job} objects in your project as a - * readable object stream. - * - * @param {object} [options] Configuration object. See - * {@link BigQuery#getJobs} for a complete list of options. 
- * @returns {stream} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * bigquery.getJobsStream() - * .on('error', console.error) - * .on('data', function(job) { - * // job is a Job object. - * }) - * .on('end', function() { - * // All jobs retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * bigquery.getJobsStream() - * .on('data', function(job) { - * this.end(); - * }); - */ - this.getJobsStream = paginator.streamify('getJobs'); - } - - /** - * Merge a rowset returned from the API with a table schema. - * - * @private - * - * @param {object} schema - * @param {array} rows - * @returns {array} Fields using their matching names from the table's schema. - */ - static mergeSchemaWithRows_( - schema: TableSchema|TableField, rows: TableRow[]) { - return arrify(rows).map(mergeSchema).map(flattenRows); - function mergeSchema(row: TableRow) { - return row.f.map((field: TableRowField, index: number) => { - const schemaField = schema.fields![index]; - let value = field.v; - if (schemaField.mode === 'REPEATED') { - value = (value as TableRowField[]).map(val => { - return convert(schemaField, val.v); - }); - } else { - value = convert(schemaField, value); - } - // tslint:disable-next-line no-any - const fieldObject: any = {}; - fieldObject[schemaField.name] = value; - return fieldObject; - }); - } - - // tslint:disable-next-line no-any - function convert(schemaField: TableField, value: any) { - if (is.null(value)) { - return value; - } - - switch (schemaField.type) { - case 'BOOLEAN': - case 'BOOL': { - value = value.toLowerCase() === 'true'; - break; - } - case 'BYTES': { - value = Buffer.from(value, 'base64'); - break; - } - case 'FLOAT': - case 'FLOAT64': { - value = Number(value); - break; - } - case 'INTEGER': - case 'INT64': { - value = Number(value); - break; - } - case 'NUMERIC': { - value = new Big(value); - break; - } - case 'RECORD': { - value = BigQuery.mergeSchemaWithRows_(schemaField, value).pop(); - break; - } - case 'DATE': { - value = BigQuery.date(value); - break; - } - case 'DATETIME': { - value = BigQuery.datetime(value); - break; - } - case 'TIME': { - value = BigQuery.time(value); - break; - } - case 'TIMESTAMP': { - value = BigQuery.timestamp(new Date(value * 1000)); - break; - } - default: - break; - } - - return value; - } - - // tslint:disable-next-line no-any - function flattenRows(rows: any[]) { - return rows.reduce((acc, row) => { - const key = Object.keys(row)[0]; - acc[key] = row[key]; - return acc; - }, {}); - } - } - - /** - * The `DATE` type represents a logical calendar date, independent of time - * zone. It does not represent a specific 24-hour time period. Rather, a given - * DATE value represents a different 24-hour period when interpreted in - * different time zones, and may represent a shorter or longer day during - * Daylight Savings Time transitions. - * - * @param {object|string} value The date. If a string, this should be in the - * format the API describes: `YYYY-[M]M-[D]D`. - * Otherwise, provide an object. - * @param {string|number} value.year Four digits. - * @param {string|number} value.month One or two digits. - * @param {string|number} value.day One or two digits. 
- * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * const date = bigquery.date('2017-01-01'); - * - * //- - * // Alternatively, provide an object. - * //- - * const date2 = bigquery.date({ - * year: 2017, - * month: 1, - * day: 1 - * }); - */ - static date(value: BigQueryDateOptions|string) { - return new BigQueryDate(value); - } - - /** - * @param {object|string} value The date. If a string, this should be in the - * format the API describes: `YYYY-[M]M-[D]D`. - * Otherwise, provide an object. - * @param {string|number} value.year Four digits. - * @param {string|number} value.month One or two digits. - * @param {string|number} value.day One or two digits. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const date = BigQuery.date('2017-01-01'); - * - * //- - * // Alternatively, provide an object. - * //- - * const date2 = BigQuery.date({ - * year: 2017, - * month: 1, - * day: 1 - * }); - */ - - date(value: BigQueryDateOptions|string) { - return BigQuery.date(value); - } - - /** - * A `DATETIME` data type represents a point in time. Unlike a `TIMESTAMP`, - * this does not refer to an absolute instance in time. Instead, it is the - * civil time, or the time that a user would see on a watch or calendar. - * - * @method BigQuery.datetime - * @param {object|string} value The time. If a string, this should be in the - * format the API describes: `YYYY-[M]M-[D]D[ [H]H:[M]M:[S]S[.DDDDDD]]`. - * Otherwise, provide an object. - * @param {string|number} value.year Four digits. - * @param {string|number} value.month One or two digits. - * @param {string|number} value.day One or two digits. - * @param {string|number} [value.hours] One or two digits (`00` - `23`). - * @param {string|number} [value.minutes] One or two digits (`00` - `59`). - * @param {string|number} [value.seconds] One or two digits (`00` - `59`). - * @param {string|number} [value.fractional] Up to six digits for microsecond - * precision. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const datetime = BigQuery.datetime('2017-01-01 13:00:00'); - * - * //- - * // Alternatively, provide an object. - * //- - * const datetime = BigQuery.datetime({ - * year: 2017, - * month: 1, - * day: 1, - * hours: 14, - * minutes: 0, - * seconds: 0 - * }); - */ - - /** - * A `DATETIME` data type represents a point in time. Unlike a `TIMESTAMP`, - * this does not refer to an absolute instance in time. Instead, it is the - * civil time, or the time that a user would see on a watch or calendar. - * - * @method BigQuery#datetime - * @param {object|string} value The time. If a string, this should be in the - * format the API describes: `YYYY-[M]M-[D]D[ [H]H:[M]M:[S]S[.DDDDDD]]`. - * Otherwise, provide an object. - * @param {string|number} value.year Four digits. - * @param {string|number} value.month One or two digits. - * @param {string|number} value.day One or two digits. - * @param {string|number} [value.hours] One or two digits (`00` - `23`). - * @param {string|number} [value.minutes] One or two digits (`00` - `59`). - * @param {string|number} [value.seconds] One or two digits (`00` - `59`). - * @param {string|number} [value.fractional] Up to six digits for microsecond - * precision. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * const datetime = bigquery.datetime('2017-01-01 13:00:00'); - * - * //- - * // Alternatively, provide an object. 
- * //- - * const datetime = bigquery.datetime({ - * year: 2017, - * month: 1, - * day: 1, - * hours: 14, - * minutes: 0, - * seconds: 0 - * }); - */ - static datetime(value: BigQueryDatetimeOptions|string) { - return new BigQueryDatetime(value); - } - - datetime(value: BigQueryDatetimeOptions|string) { - return BigQuery.datetime(value); - } - - /** - * A `TIME` data type represents a time, independent of a specific date. - * - * @method BigQuery.time - * @param {object|string} value The time. If a string, this should be in the - * format the API describes: `[H]H:[M]M:[S]S[.DDDDDD]`. Otherwise, provide - * an object. - * @param {string|number} [value.hours] One or two digits (`00` - `23`). - * @param {string|number} [value.minutes] One or two digits (`00` - `59`). - * @param {string|number} [value.seconds] One or two digits (`00` - `59`). - * @param {string|number} [value.fractional] Up to six digits for microsecond - * precision. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const time = BigQuery.time('14:00:00'); // 2:00 PM - * - * //- - * // Alternatively, provide an object. - * //- - * const time = BigQuery.time({ - * hours: 14, - * minutes: 0, - * seconds: 0 - * }); - */ - - /** - * A `TIME` data type represents a time, independent of a specific date. - * - * @method BigQuery#time - * @param {object|string} value The time. If a string, this should be in the - * format the API describes: `[H]H:[M]M:[S]S[.DDDDDD]`. Otherwise, provide - * an object. - * @param {string|number} [value.hours] One or two digits (`00` - `23`). - * @param {string|number} [value.minutes] One or two digits (`00` - `59`). - * @param {string|number} [value.seconds] One or two digits (`00` - `59`). - * @param {string|number} [value.fractional] Up to six digits for microsecond - * precision. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * const time = bigquery.time('14:00:00'); // 2:00 PM - * - * //- - * // Alternatively, provide an object. - * //- - * const time = bigquery.time({ - * hours: 14, - * minutes: 0, - * seconds: 0 - * }); - */ - static time(value: BigQueryTimeOptions|string) { - return new BigQueryTime(value); - } - - time(value: BigQueryTimeOptions|string) { - return BigQuery.time(value); - } - - /** - * A timestamp represents an absolute point in time, independent of any time - * zone or convention such as Daylight Savings Time. - * - * @method BigQuery.timestamp - * @param {Date|string} value The time. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const timestamp = BigQuery.timestamp(new Date()); - */ - - /** - * A timestamp represents an absolute point in time, independent of any time - * zone or convention such as Daylight Savings Time. - * - * @method BigQuery#timestamp - * @param {Date|string} value The time. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * const timestamp = bigquery.timestamp(new Date()); - */ - static timestamp(value: Date|string) { - return new BigQueryTimestamp(value); - } - - timestamp(value: Date|string) { - return BigQuery.timestamp(value); - } - - /** - * Detect a value's type. - * - * @private - * - * @throws {error} If the type could not be detected. - * - * @see [Data Type]{@link https://cloud.google.com/bigquery/data-types} - * - * @param {*} value The value. - * @returns {string} The type detected from the value. 
- */ - // tslint:disable-next-line no-any - static getType_(value: any): ValueType { - let typeName; - - if (value instanceof BigQueryDate) { - typeName = 'DATE'; - } else if (value instanceof BigQueryDatetime) { - typeName = 'DATETIME'; - } else if (value instanceof BigQueryTime) { - typeName = 'TIME'; - } else if (value instanceof BigQueryTimestamp) { - typeName = 'TIMESTAMP'; - } else if (value instanceof Buffer) { - typeName = 'BYTES'; - } else if (value instanceof Big) { - typeName = 'NUMERIC'; - } else if (is.array(value)) { - return { - type: 'ARRAY', - arrayType: BigQuery.getType_(value[0]), - }; - } else if (is.boolean(value)) { - typeName = 'BOOL'; - } else if (is.number(value)) { - typeName = (value as number) % 1 === 0 ? 'INT64' : 'FLOAT64'; - } else if (is.object(value)) { - return { - type: 'STRUCT', - structTypes: Object.keys(value).map(prop => { - return { - name: prop, - type: BigQuery.getType_(value[prop]), - }; - }), - }; - } else if (is.string(value)) { - typeName = 'STRING'; - } - - if (!typeName) { - throw new Error([ - 'This value could not be translated to a BigQuery data type.', - value, - ].join('\n')); - } - - return { - type: typeName, - }; - } - - /** - * Convert a value into a `queryParameter` object. - * - * @private - * - * @see [Jobs.query API Reference Docs (see `queryParameters`)]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#request-body} - * - * @param {*} value The value. - * @returns {object} A properly-formed `queryParameter` object. - */ - // tslint:disable-next-line no-any - static valueToQueryParameter_(value: any) { - if (is.date(value)) { - value = BigQuery.timestamp(value as Date); - } - - const queryParameter: QueryParameter = { - parameterType: BigQuery.getType_(value), - parameterValue: {}, - }; - - const typeName = queryParameter.parameterType.type; - - if (typeName.indexOf('TIME') > -1 || typeName.indexOf('DATE') > -1) { - value = value.value; - } - - if (typeName === 'ARRAY') { - queryParameter.parameterValue.arrayValues = - (value as Array<{}>).map(value => { - return { - value, - }; - }); - } else if (typeName === 'STRUCT') { - queryParameter.parameterValue.structValues = - Object.keys(value).reduce((structValues, prop) => { - const nestedQueryParameter = - BigQuery.valueToQueryParameter_(value[prop]); - // tslint:disable-next-line no-any - (structValues as any)[prop] = nestedQueryParameter.parameterValue; - return structValues; - }, {}); - } else { - queryParameter.parameterValue.value = value; - } - - return queryParameter; - } - - createDataset(id: string, options?: DatasetResource): - Promise; - createDataset( - id: string, options: DatasetResource, callback: DatasetCallback): void; - createDataset(id: string, callback: DatasetCallback): void; - /** - * Create a dataset. - * - * @see [Datasets: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/insert} - * - * @param {string} id ID of the dataset to create. - * @param {object} [options] See a - * [Dataset - * resource](https://cloud.google.com/bigquery/docs/reference/v2/datasets#resource). - * @param {function} [callback] The callback function. - * @param {?error} callback.err An error returned while making this request - * @param {Dataset} callback.dataset The newly created dataset - * @param {object} callback.apiResponse The full API response. 
- * @returns {Promise} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * bigquery.createDataset('my-dataset', function(err, dataset, apiResponse) - * {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bigquery.createDataset('my-dataset').then(function(data) { - * const dataset = data[0]; - * const apiResponse = data[1]; - * }); - */ - createDataset( - id: string, optionsOrCallback?: DatasetResource|DatasetCallback, - cb?: DatasetCallback): void|Promise { - const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - - this.request( - { - method: 'POST', - uri: '/datasets', - json: extend( - true, { - location: this.location, - }, - options, { - datasetReference: { - datasetId: id, - }, - }), - }, - (err, resp) => { - if (err) { - callback!(err, null, resp); - return; - } - - const dataset = this.dataset(id); - dataset.metadata = resp; - - callback!(null, dataset, resp); - }); - } - - createQueryJob(options: Query|string): Promise; - createQueryJob(options: Query|string, callback: JobCallback): void; - /** - * Run a query as a job. No results are immediately returned. Instead, your - * callback will be executed with a {@link Job} object that you must - * ping for the results. See the Job documentation for explanations of how to - * check on the status of the job. - * - * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} - * - * @param {object|string} options The configuration object. This must be in - * the format of the [`configuration.query`](http://goo.gl/wRpHvR) - * property of a Jobs resource. If a string is provided, this is used as the - * query string, and all other options are defaulted. - * @param {Table} [options.destination] The table to save the - * query's results to. If omitted, a new table will be created. - * @param {boolean} [options.dryRun] If set, don't actually run this job. A - * valid query will update the job with processing statistics. These can - * be accessed via `job.metadata`. - * @param {string} [options.location] The geographic location of the job. - * Required except for US and EU. - * @param {string} [options.jobId] Custom job id. - * @param {string} [options.jobPrefix] Prefix to apply to the job id. - * @param {string} options.query A query string, following the BigQuery query - * syntax, of the query to execute. - * @param {boolean} [options.useLegacySql=false] Option to use legacy sql syntax. - * @param {function} [callback] The callback function. - * @param {?error} callback.err An error returned while making this request. - * @param {Job} callback.job The newly created job for your query. - * @param {object} callback.apiResponse The full API response. - * @returns {Promise} - * - * @throws {Error} If a query is not specified. - * @throws {Error} If a Table is not provided as a destination. - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT - * 100'; - * - * //- - * // You may pass only a query string, having a new table created to store - * the - * // results of the query. - * //- - * bigquery.createQueryJob(query, function(err, job) {}); - * - * //- - * // You can also control the destination table by providing a - * // {@link Table} object. 
- * //- - * bigquery.createQueryJob({ - * destination: bigquery.dataset('higher_education').table('institutions'), - * query: query - * }, function(err, job) {}); - * - * //- - * // After you have run `createQueryJob`, your query will execute in a job. - * Your - * // callback is executed with a {@link Job} object so that you may - * // check for the results. - * //- - * bigquery.createQueryJob(query, function(err, job) { - * if (!err) { - * job.getQueryResults(function(err, rows, apiResponse) {}); - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bigquery.createQueryJob(query).then(function(data) { - * const job = data[0]; - * const apiResponse = data[1]; - * - * return job.getQueryResults(); - * }); - */ - createQueryJob(opts: Query|string, callback?: JobCallback): - void|Promise { - const options = typeof opts === 'object' ? opts : {query: opts}; - if ((!options || !options.query) && !options.pageToken) { - throw new Error('A SQL query string is required.'); - } - - // tslint:disable-next-line no-any - const query: any = extend( - true, { - useLegacySql: false, - }, - options); - - if (options.destination) { - if (!(options.destination instanceof Table)) { - throw new Error('Destination must be a Table object.'); - } - - query.destinationTable = { - datasetId: options.destination.dataset.id, - projectId: options.destination.dataset.bigQuery.projectId, - tableId: options.destination.id, - }; - - delete query.destination; - } - - if (query.params) { - query.parameterMode = is.array(query.params) ? 'positional' : 'named'; - - if (query.parameterMode === 'named') { - query.queryParameters = []; - - // tslint:disable-next-line forin - for (const namedParamater in query.params) { - const value = query.params[namedParamater]; - const queryParameter = BigQuery.valueToQueryParameter_(value); - queryParameter.name = namedParamater; - query.queryParameters.push(queryParameter); - } - } else { - query.queryParameters = - query.params.map(BigQuery.valueToQueryParameter_); - } - - delete query.params; - } - - // tslint:disable-next-line no-any - const reqOpts: any = { - configuration: { - query, - }, - }; - - if (query.dryRun) { - reqOpts.configuration.dryRun = query.dryRun; - delete query.dryRun; - } - - if (query.jobPrefix) { - reqOpts.jobPrefix = query.jobPrefix; - delete query.jobPrefix; - } - - if (query.location) { - reqOpts.location = query.location; - delete query.location; - } - - if (query.jobId) { - reqOpts.jobId = query.jobId; - delete query.jobId; - } - - this.createJob(reqOpts, callback!); - } - - createJob(options: JobOptions): Promise; - createJob(options: JobOptions, callback: JobCallback): void; - /** - * Creates a job. Typically when creating a job you'll have a very specific - * task in mind. For this we recommend one of the following methods: - * - * - {@link BigQuery#createQueryJob} - * - {@link Table#createCopyJob} - * - {@link Table#createCopyFromJob} - * - {@link Table#createExtractJob} - * - {@link Table#createLoadJob} - * - * However in the event you need a finer level of control over the job - * creation, you can use this method to pass in a raw [Job - * resource](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs) - * object. 
- * - * @see [Jobs Overview]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs} - * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} - * - * @param {object} options Object in the form of a [Job resource](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs); - * @param {string} [options.jobId] Custom job id. - * @param {string} [options.jobPrefix] Prefix to apply to the job id. - * @param {string} [options.location] The geographic location of the job. - * Required except for US and EU. - * @param {function} [callback] The callback function. - * @param {?error} callback.err An error returned while making this request. - * @param {Job} callback.job The newly created job. - * @param {object} callback.apiResponse The full API response. - * @returns {Promise} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * const options = { - * configuration: { - * query: { - * query: 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100' - * } - * } - * }; - * - * bigquery.createJob(options, function(err, job) { - * if (err) { - * // Error handling omitted. - * } - * - * job.getQueryResults(function(err, rows) {}); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bigquery.createJob(options).then(function(data) { - * const job = data[0]; - * - * return job.getQueryResults(); - * }); - */ - createJob(options: JobOptions, callback?: JobCallback): - void|Promise { - // tslint:disable-next-line no-any - const reqOpts: any = extend({}, options); - let jobId = reqOpts.jobId || uuid.v4(); - - if (reqOpts.jobId) { - delete reqOpts.jobId; - } - - if (reqOpts.jobPrefix) { - jobId = reqOpts.jobPrefix + jobId; - delete reqOpts.jobPrefix; - } - - reqOpts.jobReference = { - projectId: this.projectId, - jobId, - location: this.location, - }; - - if (options.location) { - reqOpts.jobReference.location = options.location; - delete reqOpts.location; - } - - this.request( - { - method: 'POST', - uri: '/jobs', - json: reqOpts, - }, - (err, resp) => { - if (err) { - callback!(err, null, resp); - return; - } - - if (resp.status.errors) { - err = new common.util.ApiError({ - errors: resp.status.errors, - response: resp, - } as GoogleErrorBody); - } - - const job = this.job(jobId, { - location: resp.jobReference.location, - }); - - job.metadata = resp; - callback!(err, job, resp); - }); - } - - /** - * Create a reference to a dataset. - * - * @param {string} id ID of the dataset. - * @param {object} [options] Dataset options. - * @param {string} [options.location] The geographic location of the dataset. - * Required except for US and EU. - * @returns {Dataset} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * const dataset = bigquery.dataset('higher_education'); - */ - dataset(id: string, options?: DataSetOptions) { - if (typeof id !== 'string') { - throw new TypeError('A dataset ID is required.'); - } - - if (this.location) { - options = extend({location: this.location}, options); - } - return new Dataset(this, id, options); - } - - getDatasets(options?: GetDatasetsOptions): Promise; - getDatasets(options: GetDatasetsOptions, callback: DatasetsCallback): void; - getDatasets(callback: DatasetsCallback): void; - /** - * List all or some of the datasets in your project. 
- * - * @see [Datasets: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/list} - * - * @param {object} [options] Configuration object. - * @param {boolean} [options.all] List all datasets, including hidden ones. - * @param {boolean} [options.autoPaginate] Have pagination handled automatically. - * Default: true. - * @param {number} [options.maxApiCalls] Maximum number of API calls to make. - * @param {number} [options.maxResults] Maximum number of results to return. - * @param {string} [options.pageToken] Token returned from a previous call, to - * request the next page of results. - * @param {function} [callback] The callback function. - * @param {?error} callback.err An error returned while making this request - * @param {Dataset[]} callback.datasets The list of datasets in your project. - * @returns {Promise} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * bigquery.getDatasets(function(err, datasets) { - * if (!err) { - * // datasets is an array of Dataset objects. - * } - * }); - * - * //- - * // To control how many API requests are made and page through the results - * // manually, set `autoPaginate` to `false`. - * //- - * function manualPaginationCallback(err, datasets, nextQuery, apiResponse) { - * if (nextQuery) { - * // More results exist. - * bigquery.getDatasets(nextQuery, manualPaginationCallback); - * } - * } - * - * bigquery.getDatasets({ - * autoPaginate: false - * }, manualPaginationCallback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bigquery.getDatasets().then(function(datasets) {}); - */ - getDatasets( - optionsOrCallback?: GetDatasetsOptions|DatasetsCallback, - cb?: DatasetsCallback): void|Promise { - const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - - this.request( - { - uri: '/datasets', - qs: options, - }, - (err, resp) => { - if (err) { - callback!(err, null, null, resp); - return; - } - - let nextQuery: GetDatasetsOptions|null = null; - - if (resp.nextPageToken) { - nextQuery = extend({}, options, { - pageToken: resp.nextPageToken, - }); - } - - // tslint:disable-next-line no-any - const datasets = (resp.datasets || []).map((dataset: any) => { - const ds = this.dataset(dataset.datasetReference.datasetId, { - location: dataset.location, - }); - - ds.metadata = dataset; - return ds; - }); - - callback!(null, datasets, nextQuery, resp); - }); - } - - getJobs(options?: GetJobsOptions): Promise; - getJobs(options: GetJobsOptions, callback: GetJobsCallback): void; - getJobs(callback: GetJobsCallback): void; - /** - * Get all of the jobs from your project. - * - * @see [Jobs: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/list} - * - * @param {object} [options] Configuration object. - * @param {boolean} [options.allUsers] Display jobs owned by all users in the - * project. - * @param {boolean} [options.autoPaginate] Have pagination handled - * automatically. Default: true. - * @param {number} [options.maxApiCalls] Maximum number of API calls to make. - * @param {number} [options.maxResults] Maximum number of results to return. - * @param {string} [options.pageToken] Token returned from a previous call, to - * request the next page of results. - * @param {string} [options.projection] Restrict information returned to a set - * of selected fields. 
Acceptable values are "full", for all job data, and - * "minimal", to not include the job configuration. - * @param {string} [options.stateFilter] Filter for job state. Acceptable - * values are "done", "pending", and "running". Sending an array to this - * option performs a disjunction. - * @param {function} [callback] The callback function. - * @param {?error} callback.err An error returned while making this request - * @param {Job[]} callback.jobs The list of jobs in your - * project. - * @returns {Promise} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * bigquery.getJobs(function(err, jobs) { - * if (!err) { - * // jobs is an array of Job objects. - * } - * }); - * - * //- - * // To control how many API requests are made and page through the results - * // manually, set `autoPaginate` to `false`. - * //- - * function manualPaginationCallback(err, jobs, nextQuery, apiRespose) { - * if (nextQuery) { - * // More results exist. - * bigquery.getJobs(nextQuery, manualPaginationCallback); - * } - * } - * - * bigquery.getJobs({ - * autoPaginate: false - * }, manualPaginationCallback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bigquery.getJobs().then(function(data) { - * const jobs = data[0]; - * }); - */ - getJobs( - optionsOrCallback?: GetJobsOptions|GetJobsCallback, - cb?: GetJobsCallback): void|Promise { - const that = this; - const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - this.request( - { - uri: '/jobs', - qs: options, - useQuerystring: true, - }, - (err, resp) => { - if (err) { - callback!(err, null, null, resp); - return; - } - - let nextQuery: {}|null = null; - - if (resp.nextPageToken) { - nextQuery = extend({}, options, { - pageToken: resp.nextPageToken, - }); - } - - // tslint:disable-next-line no-any - const jobs = (resp.jobs || []).map((jobObject: any) => { - const job = that.job(jobObject.jobReference.jobId, { - location: jobObject.jobReference.location, - }); - - job.metadata = jobObject; - return job; - }); - - callback!(null, jobs, nextQuery, resp); - }); - } - - /** - * Create a reference to an existing job. - * - * @param {string} id ID of the job. - * @param {object} [options] Configuration object. - * @param {string} [options.location] The geographic location of the job. - * Required except for US and EU. - * @returns {Job} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * const myExistingJob = bigquery.job('job-id'); - */ - job(id: string, options?: JobOptions) { - if (this.location) { - options = extend({location: this.location}, options); - } - return new Job(this, id, options); - } - - query(query: string, options?: QueryOptions): Promise; - query(query: Query, options?: QueryOptions): Promise; - query(query: string, options: QueryOptions, callback?: QueryRowsCallback): - void; - query( - query: Query, options: QueryOptions, - callback?: SimpleQueryRowsCallback): void; - query(query: string, callback?: QueryRowsCallback): void; - query(query: Query, callback?: SimpleQueryRowsCallback): void; - /** - * Run a query scoped to your project. For manual pagination please refer to - * {@link BigQuery#createQueryJob}. 
- * - * @see [Jobs: query API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/query} - * - * @param {string|object} query A string SQL query or configuration object. - * For all available options, see - * [Jobs: query request - * body](https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#request-body). - * @param {string} [query.location] The geographic location of the job. - * Required except for US and EU. - * @param {string} [query.jobId] Custom id for the underlying job. - * @param {string} [query.jobPrefix] Prefix to apply to the underlying job id. - * @param {object|Array<*>} query.params For positional SQL parameters, provide - * an array of values. For named SQL parameters, provide an object which - * maps each named parameter to its value. The supported types are - * integers, floats, {@link BigQuery#date} objects, {@link BigQuery#datetime} - * objects, {@link BigQuery#time} objects, {@link BigQuery#timestamp} - * objects, Strings, Booleans, and Objects. - * @param {string} query.query A query string, following the BigQuery query - * syntax, of the query to execute. - * @param {boolean} [query.useLegacySql=false] Option to use legacy sql syntax. - * @param {object} [options] Configuration object for query results. - * @param {number} [options.maxResults] Maximum number of results to read. - * @param {number} [options.timeoutMs] How long to wait for the query to - * complete, in milliseconds, before returning. Default is to return - * immediately. If the timeout passes before the job completes, the - * request will fail with a `TIMEOUT` error. - * @param {function} [callback] The callback function. - * @param {?error} callback.err An error returned while making this request - * @param {array} callback.rows The list of results from your query. - * @returns {Promise} - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * const bigquery = new BigQuery(); - * - * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT - * 100'; - * - * bigquery.query(query, function(err, rows) { - * if (!err) { - * // rows is an array of results. - * } - * }); - * - * //- - * // Positional SQL parameters are supported. - * //- - * bigquery.query({ - * query: [ - * 'SELECT url', - * 'FROM `publicdata.samples.github_nested`', - * 'WHERE repository.owner = ?' - * ].join(' '), - * - * params: [ - * 'google' - * ] - * }, function(err, rows) {}); - * - * //- - * // Or if you prefer to name them, that's also supported. - * //- - * bigquery.query({ - * query: [ - * 'SELECT url', - * 'FROM `publicdata.samples.github_nested`', - * 'WHERE repository.owner = @owner' - * ].join(' '), - * params: { - * owner: 'google' - * } - * }, function(err, rows) {}); - * - * //- - * // If you need to use a `DATE`, `DATETIME`, `TIME`, or `TIMESTAMP` type in - * // your query, see {@link BigQuery#date}, {@link BigQuery#datetime}, - * // {@link BigQuery#time}, and {@link BigQuery#timestamp}. - * //- - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bigquery.query(query).then(function(data) { - * const rows = data[0]; - * }); - */ - query( - query: string|Query, - optionsOrCallback?: QueryOptions|SimpleQueryRowsCallback| - QueryRowsCallback, - cb?: SimpleQueryRowsCallback|QueryRowsCallback): - void|Promise|Promise { - let options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const callback = - typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; - this.createQueryJob(query, (err, job, resp) => { - if (err) { - (callback as SimpleQueryRowsCallback)(err, null, resp); - return; - } - if (typeof query === 'object' && query.dryRun) { - (callback as SimpleQueryRowsCallback)(null, [], resp); - return; - } - // The Job is important for the `queryAsStream_` method, so a new query - // isn't created each time results are polled for. - options = extend({job}, options); - job!.getQueryResults(options, callback as QueryRowsCallback); - }); - } - - /** - * This method will be called by `createQueryStream()`. It is required to - * properly set the `autoPaginate` option value. - * - * @private - */ - queryAsStream_(query: Query, callback?: SimpleQueryRowsCallback) { - if (query.job) { - query.job.getQueryResults(query, callback as QueryRowsCallback); - return; - } - this.query(query, {autoPaginate: false}, callback); - } -} - -/*! Developer Documentation - * - * These methods can be auto-paginated. - */ -paginator.extend(BigQuery, ['getDatasets', 'getJobs']); - -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -promisifyAll(BigQuery, { - exclude: ['dataset', 'date', 'datetime', 'job', 'time', 'timestamp'], -}); - -/** - * Date class for BigQuery. - */ -export class BigQueryDate { - value: string; - constructor(value: BigQueryDateOptions|string) { - if (typeof value === 'object') { - value = BigQuery.datetime(value).value; - } - this.value = value; - } -} - -/** - * Timestamp class for BigQuery. - */ -export class BigQueryTimestamp { - value: string; - constructor(value: Date|string) { - this.value = new Date(value).toJSON(); - } -} - -/** - * Datetime class for BigQuery. - */ -export class BigQueryDatetime { - value: string; - constructor(value: BigQueryDatetimeOptions|string) { - if (typeof value === 'object') { - let time; - if (value.hours) { - time = BigQuery.time(value).value; - } - value = format('{y}-{m}-{d}{time}', { - y: value.year, - m: value.month, - d: value.day, - time: time ? ' ' + time : '', - }); - } else { - value = value.replace(/^(.*)T(.*)Z$/, '$1 $2'); - } - this.value = value as string; - } -} - -/** - * Time class for BigQuery. - */ -export class BigQueryTime { - value: string; - constructor(value: BigQueryTimeOptions|string) { - if (typeof value === 'object') { - value = format('{h}:{m}:{s}{f}', { - h: value.hours, - m: value.minutes || 0, - s: value.seconds || 0, - f: is.defined(value.fractional) ? '.' + value.fractional : '', - }); - } - this.value = value as string; - } -} - -/** - * {@link Dataset} class. - * - * @name BigQuery.Dataset - * @see Dataset - * @type {constructor} - */ -export {Dataset}; - -/** - * {@link Job} class. - * - * @name BigQuery.Job - * @see Job - * @type {constructor} - */ -export {Job}; - -/** - * {@link Table} class. - * - * @name BigQuery.Table - * @see Table - * @type {constructor} - */ -export {Table}; - -/** - * The `@google-cloud/bigquery` package exports an object that contains the - * {@link BigQuery} class. - * - * See {@link BigQuery} and {@link BigQueryOptions} for client methods and - * configuration options. 
- * - * @module {object} @google-cloud/bigquery - * @alias nodejs-bigquery - * - * @example npm install --save - * @google-cloud/bigquery - * - * @example - * const {BigQuery} = require('@google-cloud/bigquery'); - * - * @example const bigquery = new BigQuery(); - * - * @example const bigquery = new BigQuery({ projectId: - * 'your-project-id', keyFilename: '/path/to/keyfile.json' - * }); - * - * @example - * region_tag:bigquery_quickstart - * Full quickstart example: - */ - -export {RowsCallback, RowsResponse, RowMetadata}; +export { + BigQuery, + BigQueryDate, + BigQueryDateOptions, + BigQueryDatetime, + BigQueryDatetimeOptions, + BigQueryInt, + BigQueryOptions, + BigQueryTime, + BigQueryTimeOptions, + BigQueryTimestamp, + DatasetCallback, + DatasetResource, + DatasetResponse, + DatasetsCallback, + DatasetsResponse, + Geography, + GetDatasetsOptions, + GetJobsCallback, + GetJobsOptions, + GetJobsResponse, + IntegerTypeCastOptions, + IntegerTypeCastValue, + JobRequest, + Json, + PROTOCOL_REGEX, + PagedCallback, + PagedRequest, + PagedResponse, + ProvidedTypeArray, + ProvidedTypeStruct, + Query, + QueryOptions, + QueryParameter, + QueryRowsCallback, + QueryRowsResponse, + QueryStreamOptions, + RequestCallback, + ResourceCallback, + SimpleQueryRowsCallback, + SimpleQueryRowsResponse, + ValueType, +} from './bigquery'; + +export { + CreateDatasetOptions, + Dataset, + DatasetDeleteOptions, + DatasetOptions, + GetModelsCallback, + GetModelsOptions, + GetModelsResponse, + GetRoutinesCallback, + GetRoutinesOptions, + GetRoutinesResponse, + GetTablesCallback, + GetTablesOptions, + GetTablesResponse, + RoutineCallback, + RoutineMetadata, + RoutineResponse, + TableCallback, + TableResponse, +} from './dataset'; + +export { + CancelCallback, + CancelResponse, + Job, + JobMetadata, + JobOptions, + QueryResultsOptions, +} from './job'; + +export { + CreateExtractJobOptions, + File, + JobCallback, + JobMetadataCallback, + JobMetadataResponse, + JobResponse, + Model, +} from './model'; + +export {Routine} from './routine'; + +export { + CopyTableMetadata, + CreateCopyJobMetadata, + FormattedMetadata, + GetPolicyOptions, + GetRowsOptions, + InsertRow, + InsertRowsCallback, + InsertRowsOptions, + InsertRowsResponse, + JobLoadMetadata, + PartialInsertFailure, + PermissionsCallback, + PermissionsResponse, + Policy, + PolicyCallback, + PolicyRequest, + PolicyResponse, + RowMetadata, + RowsCallback, + RowsResponse, + SetPolicyOptions, + SetTableMetadataOptions, + Table, + TableField, + TableMetadata, + TableOptions, + TableRow, + TableRowField, + TableRowValue, + TableSchema, + ViewDefinition, +} from './table'; diff --git a/src/job.ts b/src/job.ts index e08c479f..997d275c 100644 --- a/src/job.ts +++ b/src/job.ts @@ -18,37 +18,38 @@ * @module bigquery/job */ -import {Metadata, MetadataCallback, Operation, util} from '@google-cloud/common'; -import {paginator} from '@google-cloud/paginator'; +import { + Metadata, + MetadataCallback, + Operation, + util, +} from '@google-cloud/common'; +import {paginator, ResourceStream} from '@google-cloud/paginator'; import {promisifyAll} from '@google-cloud/promisify'; import * as extend from 'extend'; -import * as r from 'request'; -import {Readable} from 'stream'; -import {teenyRequest} from 'teeny-request'; -import {BigQuery, QueryRowsCallback, QueryRowsResponse} from '../src'; - -// tslint:disable-next-line no-any -export type JobMetadata = any; - -// tslint:disable-next-line no-any -export type JobOptions = any; - -export interface CancelCallback { - (err: Error|null, 
apiResponse?: r.Response): void; -} - -export type CancelResponse = [r.Response]; - - -export interface QueryResultsOptions { - autoPaginate?: boolean; - maxApiCalls?: number; - maxResults?: number; - pageToken?: string; - startIndex?: number; - timeoutMs?: number; -} +import { + BigQuery, + IntegerTypeCastOptions, + JobRequest, + PagedRequest, + QueryRowsCallback, + QueryRowsResponse, + RequestCallback, +} from './bigquery'; +import {RowMetadata} from './table'; +import bigquery from './types'; + +export type JobMetadata = bigquery.IJob; +export type JobOptions = JobRequest; + +export type CancelCallback = RequestCallback; +export type CancelResponse = [bigquery.IJobCancelResponse]; + +export type QueryResultsOptions = { + job?: Job; + wrapIntegers?: boolean | IntegerTypeCastOptions; +} & PagedRequest; /** * @callback QueryResultsCallback @@ -121,12 +122,11 @@ export interface QueryResultsOptions { class Job extends Operation { bigQuery: BigQuery; location?: string; - getQueryResultsStream: (options?: QueryResultsOptions) => Readable; + getQueryResultsStream: ( + options?: QueryResultsOptions + ) => ResourceStream; constructor(bigQuery: BigQuery, id: string, options?: JobOptions) { - let location; - if (options && options.location) { - location = options.location; - } + let location: string; const methods = { /** @@ -219,7 +219,11 @@ class Job extends Operation { */ getMetadata: { reqOpts: { - qs: {location}, + qs: { + get location() { + return location; + }, + }, }, }, }; @@ -229,7 +233,15 @@ class Job extends Operation { baseUrl: '/jobs', id, methods, - requestModule: teenyRequest as typeof r, + }); + + Object.defineProperty(this, 'location', { + get() { + return location; + }, + set(_location) { + location = _location; + }, }); this.bigQuery = bigQuery; @@ -260,8 +272,9 @@ class Job extends Operation { * })) * .pipe(fs.createWriteStream('./test/testdata/testfile.json')); */ - this.getQueryResultsStream = - paginator.streamify('getQueryResultsAsStream_'); + this.getQueryResultsStream = paginator.streamify( + 'getQueryResultsAsStream_' + ); } cancel(): Promise; @@ -296,7 +309,7 @@ class Job extends Operation { * const apiResponse = data[0]; * }); */ - cancel(callback?: CancelCallback): void|Promise { + cancel(callback?: CancelCallback): void | Promise { let qs; if (this.location) { @@ -304,17 +317,20 @@ class Job extends Operation { } this.request( - { - method: 'POST', - uri: '/cancel', - qs, - }, - callback!); + { + method: 'POST', + uri: '/cancel', + qs, + }, + callback! + ); } getQueryResults(options?: QueryResultsOptions): Promise; - getQueryResults(options: QueryResultsOptions, callback: QueryRowsCallback): - void; + getQueryResults( + options: QueryResultsOptions, + callback: QueryRowsCallback + ): void; getQueryResults(callback: QueryRowsCallback): void; /** * Get the results of a job. @@ -331,9 +347,14 @@ class Job extends Operation { * to the `nextQuery` argument of your callback. * @param {number} [options.startIndex] Zero-based index of the starting row. * @param {number} [options.timeoutMs] How long to wait for the query to - * complete, in milliseconds, before returning. Default is to return - * immediately. If the timeout passes before the job completes, the - * request will fail with a `TIMEOUT` error. + * complete, in milliseconds, before returning. Default is 10 seconds. + * If the timeout passes before the job completes, an error will be returned + * and the 'jobComplete' field in the response will be false. 
+ * @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers=false] Wrap values + * of 'INT64' type in {@link BigQueryInt} objects. + * If a `boolean`, this will wrap values in {@link BigQueryInt} objects. + * If an `object`, this will return a value returned by + * `wrapIntegers.integerTypeCastFunction`. * @param {QueryResultsCallback|ManualQueryResultsCallback} [callback] The * callback function. If `autoPaginate` is set to false a * {@link ManualQueryResultsCallback} should be used. @@ -384,50 +405,74 @@ class Job extends Operation { * }); */ getQueryResults( - optionsOrCallback?: QueryResultsOptions|QueryRowsCallback, - cb?: QueryRowsCallback): void|Promise { - let options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + optionsOrCallback?: QueryResultsOptions | QueryRowsCallback, + cb?: QueryRowsCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - options = extend( - { - location: this.location, - }, - options); + const qs = extend( + { + location: this.location, + }, + options + ); - this.bigQuery.request( - { - uri: '/queries/' + this.id, - qs: options, - }, - (err, resp) => { - if (err) { - callback!(err, null, null, resp); - return; - } + const wrapIntegers = qs.wrapIntegers ? qs.wrapIntegers : false; + delete qs.wrapIntegers; - // tslint:disable-next-line no-any - let rows: any = []; + delete qs.job; - if (resp.schema && resp.rows) { - rows = BigQuery.mergeSchemaWithRows_(resp.schema, resp.rows); - } + const timeoutOverride = + typeof qs.timeoutMs === 'number' ? qs.timeoutMs : false; - let nextQuery: {}|null = null; - if (resp.jobComplete === false) { - // Query is still running. - nextQuery = extend({}, options); - } else if (resp.pageToken) { - // More results exist. - nextQuery = extend({}, options, { - pageToken: resp.pageToken, - }); + this.bigQuery.request( + { + uri: '/queries/' + this.id, + qs, + }, + (err, resp) => { + if (err) { + callback!(err, null, null, resp); + return; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let rows: any = []; + + if (resp.schema && resp.rows) { + rows = BigQuery.mergeSchemaWithRows_( + resp.schema, + resp.rows, + wrapIntegers + ); + } + + let nextQuery: {} | null = null; + if (resp.jobComplete === false) { + // Query is still running. + nextQuery = Object.assign({}, options); + + // If timeout override was provided, return error. + if (timeoutOverride) { + const err = new Error( + `The query did not complete before ${timeoutOverride}ms` + ); + callback!(err, null, nextQuery, resp); + return; } - - callback!(null, rows, nextQuery, resp); - }); + } else if (resp.pageToken) { + // More results exist. 
+ nextQuery = Object.assign({}, options, { + pageToken: resp.pageToken, + }); + } + + callback!(null, rows, nextQuery, resp); + } + ); } /** @@ -437,7 +482,9 @@ class Job extends Operation { * @private */ getQueryResultsAsStream_( - options: QueryResultsOptions, callback: QueryRowsCallback): void { + options: QueryResultsOptions, + callback: QueryRowsCallback + ): void { options = extend({autoPaginate: false}, options); this.getQueryResults(options, callback); } @@ -454,28 +501,23 @@ class Job extends Operation { * @param {function} callback */ poll_(callback: MetadataCallback): void { - this.getMetadata( - (err: Error, metadata: Metadata, apiResponse: r.Response) => { - // tslint:disable-next-line no-any - if (!err && (apiResponse as any).status && - // tslint:disable-next-line no-any - (apiResponse as any).status.errors) { - // tslint:disable-next-line no-any - err = new util.ApiError((apiResponse as any).status); - } - - if (err) { - callback(err); - return; - } - - if (metadata.status.state !== 'DONE') { - callback(null); - return; - } - - callback(null, metadata); - }); + this.getMetadata((err: Error, metadata: Metadata) => { + if (!err && metadata.status && metadata.status.errorResult) { + err = new util.ApiError(metadata.status); + } + + if (err) { + callback(err); + return; + } + + if (metadata.status.state !== 'DONE') { + callback(null); + return; + } + + callback(null, metadata); + }); } } diff --git a/src/model.ts b/src/model.ts new file mode 100644 index 00000000..a0f9b250 --- /dev/null +++ b/src/model.ts @@ -0,0 +1,475 @@ +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as common from '@google-cloud/common'; +import {promisifyAll} from '@google-cloud/promisify'; +import arrify = require('arrify'); +import * as extend from 'extend'; +import { + BigQuery, + Job, + Dataset, + ResourceCallback, + RequestCallback, + JobRequest, +} from '.'; +import {JobMetadata} from './job'; +import bigquery from './types'; + +// This is supposed to be a @google-cloud/storage `File` type. The storage npm +// module includes these types, but is currently installed as a devDependency. +// Unless it's included as a production dependency, the types would not be +// included. The storage module is fairly large, and only really needed for +// types. We need to figure out how to include these types properly. +export interface File { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + bucket: any; + kmsKeyName?: string; + userProject?: string; + name: string; + generation?: number; +} + +export type JobMetadataCallback = RequestCallback; +export type JobMetadataResponse = [JobMetadata]; + +export type JobResponse = [Job, bigquery.IJob]; +export type JobCallback = ResourceCallback; + +export type CreateExtractJobOptions = JobRequest< + bigquery.IJobConfigurationExtract +> & { + format?: 'ML_TF_SAVED_MODEL' | 'ML_XGBOOST_BOOSTER'; +}; + +/** + * The model export formats accepted by BigQuery. 
+ * + * @type {array} + * @private + */ +const FORMATS = ['ML_TF_SAVED_MODEL', 'ML_XGBOOST_BOOSTER']; + +/** + * Model objects are returned by methods such as {@link Dataset#model} and + * {@link Dataset#getModels}. + * + * @class + * @param {Dataset} dataset {@link Dataset} instance. + * @param {string} id The ID of the model. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * + * const model = dataset.model('my-model'); + */ +class Model extends common.ServiceObject { + dataset: Dataset; + bigQuery: BigQuery; + + constructor(dataset: Dataset, id: string) { + const methods = { + /** + * Delete the model. + * + * @see [Models: delete API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/delete} + * + * @method Model#delete + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this + * request. + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * model.delete((err, apiResponse) => {}); + * + * @example + * const [apiResponse] = await model.delete(); + */ + delete: true, + + /** + * Check if the model exists. + * + * @method Model#exists + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this + * request. + * @param {boolean} callback.exists Whether the model exists or not. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * model.exists((err, exists) => {}); + * + * @example + * const [exists] = await model.exists(); + */ + exists: true, + + /** + * Get a model if it exists. + * + * @see [Models: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/get} + * + * @method Model#get: + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this + * request. + * @param {Model} callback.model The {@link Model}. + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * model.get(err => { + * if (!err) { + * // `model.metadata` has been populated. + * } + * }); + * + * @example + * await model.get(); + */ + get: true, + + /** + * Return the metadata associated with the model. + * + * @see [Models: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/get} + * + * @method Model#getMetadata + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this + * request. + * @param {object} callback.metadata The metadata of the model. + * @param {object} callback.apiResponse The full API response. 
+ * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * model.getMetadata((err, metadata, apiResponse) => {}); + * + * @example + * const [metadata, apiResponse] = await model.getMetadata(); + */ + getMetadata: true, + + /** + * @see [Models: patch API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/patch} + * + * @method Model#setMetadata + * @param {object} metadata The metadata key/value object to set. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this + * request. + * @param {object} callback.metadata The updated metadata of the model. + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * const metadata = { + * friendlyName: 'TheBestModelEver' + * }; + * + * model.setMetadata(metadata, (err, metadata, apiResponse) => {}); + * + * @example + * const [metadata, apiResponse] = await model.setMetadata(metadata); + */ + setMetadata: true, + }; + + super({ + parent: dataset, + baseUrl: '/models', + id, + methods, + }); + + this.dataset = dataset; + this.bigQuery = dataset.bigQuery; + } + + createExtractJob( + destination: string | File, + options?: CreateExtractJobOptions + ): Promise; + createExtractJob( + destination: string | File, + options: CreateExtractJobOptions, + callback: JobCallback + ): void; + createExtractJob(destination: string | File, callback: JobCallback): void; + /** + * Export model to Cloud Storage. + * + * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} + * + * @param {string|File} destination Where the model should be exported + * to. A string or {@link + * https://googleapis.dev/nodejs/storage/latest/File.html File} + * object. + * @param {object} [options] The configuration object. + * @param {string} [options.format] The format to export the data in. + * Allowed options are "ML_TF_SAVED_MODEL" or "ML_XGBOOST_BOOSTER". + * Default: "ML_TF_SAVED_MODEL". + * @param {string} [options.jobId] Custom job id. + * @param {string} [options.jobPrefix] Prefix to apply to the job id. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request. + * @param {Job} callback.job The job used to export the model. + * @param {object} callback.apiResponse The full API response. + * + * @throws {Error} If a destination isn't a string or File object. + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * const extractedModel = 'gs://my-bucket/extracted-model'; + * + * function callback(err, job, apiResponse) { + * // `job` is a Job object that can be used to check the status of the + * // request. + * } + * + * //- + * // To use the default options, just pass a string or a {@link + * https://googleapis.dev/nodejs/storage/latest/File.html File} + * object. + * // + * // Note: The default format is 'ML_TF_SAVED_MODEL'. 
+ * //- + * model.createExtractJob(extractedModel, callback); + * + * //- + * // If you need more customization, pass an `options` object. + * //- + * const options = { + * format: 'ML_TF_SAVED_MODEL', + * jobId: '123abc' + * }; + * + * model.createExtractJob(extractedModel, options, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * model.createExtractJob(extractedModel, options).then((data) => { + * const job = data[0]; + * const apiResponse = data[1]; + * }); + */ + createExtractJob( + destination: string | File, + optionsOrCallback?: CreateExtractJobOptions | JobCallback, + cb?: JobCallback + ): void | Promise { + let options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + options = extend(true, options, { + destinationUris: (arrify(destination) as Array).map( + dest => { + if (common.util.isCustomType(dest, 'storage/file')) { + return ( + 'gs://' + (dest as File).bucket.name + '/' + (dest as File).name + ); + } + + if (typeof dest === 'string') { + return dest; + } + throw new Error('Destination must be a string or a File object.'); + } + ), + }); + + if (options.format) { + options.format = options.format.toUpperCase() as typeof options.format; + + if (FORMATS.includes(options.format as string)) { + options.destinationFormat = options.format!; + delete options.format; + } else { + throw new Error('Destination format not recognized: ' + options.format); + } + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const body: any = { + configuration: { + extract: extend(true, options, { + sourceModel: { + datasetId: this.dataset.id, + projectId: this.bigQuery.projectId, + modelId: this.id, + }, + }), + }, + }; + + if (options.jobPrefix) { + body.jobPrefix = options.jobPrefix; + delete options.jobPrefix; + } + + if (options.jobId) { + body.jobId = options.jobId; + delete options.jobId; + } + + this.bigQuery.createJob(body, callback!); + } + + extract( + destination: string | File, + options?: CreateExtractJobOptions + ): Promise; + extract( + destination: string | File, + options: CreateExtractJobOptions, + callback?: JobMetadataCallback + ): void; + extract(destination: string | File, callback?: JobMetadataCallback): void; + /** + * Export model to Cloud Storage. + * + * @param {string|File} destination Where the model should be exported + * to. A string or {@link + * https://googleapis.dev/nodejs/storage/latest/File.html File} + * object. + * @param {object} [options] The configuration object. + * @param {string} [options.format] The format to export + * the data in. Allowed options are "ML_TF_SAVED_MODEL" or + * "ML_XGBOOST_BOOSTER". Default: "ML_TF_SAVED_MODEL". + * @param {string} [options.jobId] Custom id for the underlying job. + * @param {string} [options.jobPrefix] Prefix to apply to the underlying job id. + * @param {function} [callback] The callback function. + * @param {?error} callback.err An error returned while making this request + * @param {object} callback.apiResponse The full API response. + * @returns {Promise} + * + * @throws {Error} If destination isn't a string or File object. 
+ * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const model = dataset.model('my-model'); + * + * const extractedModel = 'gs://my-bucket/extracted-model'; + * + * + * //- + * function callback(err, job, apiResponse) { + * // `job` is a Job object that can be used to check the status of the + * // request. + * } + * + * //- + * // To use the default options, just pass a string or a {@link + * https://googleapis.dev/nodejs/storage/latest/File.html File} + * object. + * // + * // Note: The default format is 'ML_TF_SAVED_MODEL'. + * //- + * model.createExtractJob(extractedModel, callback); + * + * //- + * // If you need more customization, pass an `options` object. + * //- + * const options = { + * format: 'ML_TF_SAVED_MODEL', + * jobId: '123abc' + * }; + * + * model.createExtractJob(extractedModel, options, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * model.createExtractJob(extractedModel, options).then((data) => { + * const job = data[0]; + * const apiResponse = data[1]; + * }); + */ + extract( + destination: string | File, + optionsOrCallback?: CreateExtractJobOptions | JobMetadataCallback, + cb?: JobMetadataCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + this.createExtractJob(destination, options, (err, job, resp) => { + if (err) { + callback!(err, resp); + return; + } + + job!.on('error', callback!).on('complete', metadata => { + callback!(null, metadata); + }); + }); + } +} + +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Model); + +/** + * Reference to the {@link Model} class. + * @name module:@google-cloud/bigquery.Model + * @see Model + */ +export {Model}; diff --git a/src/routine.ts b/src/routine.ts new file mode 100644 index 00000000..1ca9c921 --- /dev/null +++ b/src/routine.ts @@ -0,0 +1,291 @@ +/*! + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as common from '@google-cloud/common'; +import {promisifyAll} from '@google-cloud/promisify'; +import extend = require('extend'); + +import {Dataset, RoutineMetadata} from './dataset'; + +/** + * Routine objects are returned by methods such as + * {@link Dataset#routine}, {@link Dataset#createRoutine}, and + * {@link Dataset#getRoutines}. + * + * @class + * @param {Dataset} dataset {@link Dataset} instance. + * @param {string} id The ID of the routine. 
+ * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * + * const routine = dataset.routine('my_routine'); + */ +class Routine extends common.ServiceObject { + constructor(dataset: Dataset, id: string) { + const methods = { + /** + * Create a routine. + * + * @see [Routines: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert} + * + * @method Routine#create + * @param {object} config A [routine resource]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine}. + * @param {CreateRoutineCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const routine = dataset.routine('my_routine'); + * + * const config = { + * arguments: [{ + * name: 'x', + * dataType: { + * typeKind: 'INT64' + * } + * }], + * definitionBody: 'x * 3', + * routineType: 'SCALAR_FUNCTION', + * returnType: { + * typeKind: 'INT64' + * } + * }; + * + * routine.create(config, (err, routine, apiResponse) => { + * if (!err) { + * // The routine was created successfully. + * } + * }); + * + * @example + * const [routine, apiResponse] = await routine.create(config); + */ + create: true, + + /** + * @callback DeleteRoutineCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} DeleteRoutineResponse + * @property {object} 0 The full API response. + */ + /** + * Deletes a routine. + * + * @see [Routines: delete API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/delete} + * + * @method Routine#delete + * @param {DeleteRoutineCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const routine = dataset.routine('my_routine'); + * + * routine.delete((err, apiResponse) => {}); + * + * @example + * const [apiResponse] = await routine.delete(); + */ + delete: true, + + /** + * @callback RoutineExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Indicates if the routine exists. + */ + /** + * @typedef {array} RoutineExistsResponse + * @property {boolean} 0 Indicates if the routine exists. + */ + /** + * Check if the routine exists. + * + * @method Routine#exists + * @param {RoutineExistsCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const routine = dataset.routine('my_routine'); + * + * routine.exists((err, exists) => {}); + * + * @example + * const [exists] = await routine.exists(); + */ + exists: true, + + /** + * @callback GetRoutineCallback + * @param {?Error} err Request error, if any. + * @param {Routine} routine The routine. + * @param {object} apiResponse The full API response body. + */ + /** + * @typedef {array} GetRoutineResponse + * @property {Routine} 0 The routine. + * @property {object} 1 The full API response body. + */ + /** + * Get a routine if it exists. 
+ * + * @see [Routines: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/get} + * + * @method Routine#get + * @param {GetRoutineCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const routine = dataset.routine('my_routine'); + * + * routine.get((err, routine) => {}); + * + * @example + * const [routine2] = await routine.get(); + */ + get: true, + + /** + * @callback GetRoutineMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The routine metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetRoutineMetadataResponse + * @property {object} 0 The routine metadata. + * @property {object} 1 The full API response. + */ + /** + * Get the metadata associated with a routine. + * + * @see [Routines: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/get} + * + * @method Routine#getMetadata + * @param {GetRoutineMetadataCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const routine = dataset.routine('my_routine'); + * + * routine.getMetadata((err, metadata, apiResponse) => {}); + * + * @example + * const [metadata, apiResponse] = await routine.getMetadata(); + */ + getMetadata: true, + + /** + * @callback SetRoutineMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The routine metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetRoutineMetadataResponse + * @property {object} 0 The routine metadata. + * @property {object} 1 The full API response. + */ + /** + * Update a routine. + * + * @see [Routines: update API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/update} + * + * @method Routine#setMetadata + * @param {object} metadata A [routine resource object]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine}. + * @param {SetRoutineMetadataCallback} [callback] The callback function. + * @returns {Promise} + * + * @example + * const {BigQuery} = require('@google-cloud/bigquery'); + * const bigquery = new BigQuery(); + * const dataset = bigquery.dataset('my-dataset'); + * const routine = dataset.routine('my_routine'); + * + * const updates = { + * description: 'The perfect description!' 
+ * }; + * + * routine.setMetadata(updates, (err, metadata, apiResponse) => {}); + * + * @example + * const [metadata, apiResponse] = await routine.setMetadata(updates); + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + + super({ + parent: dataset, + baseUrl: '/routines', + id, + methods, + createMethod: dataset.createRoutine.bind(dataset), + }); + } + + setMetadata(metadata: RoutineMetadata): Promise; + setMetadata( + metadata: RoutineMetadata, + callback: common.ResponseCallback + ): void; + setMetadata( + metadata: RoutineMetadata, + callback?: common.ResponseCallback + ): void | Promise { + // per the python client, it would appear that in order to update a routine + // you need to send the routine in its entirety, not just the updated fields + this.getMetadata((err: Error | null, fullMetadata: RoutineMetadata) => { + if (err) { + callback!(err); + return; + } + + const updatedMetadata = extend(true, {}, fullMetadata, metadata); + super.setMetadata(updatedMetadata, callback!); + }); + } +} + +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Routine); + +export {Routine}; diff --git a/src/table.ts b/src/table.ts index ad99166d..c3080d69 100644 --- a/src/table.ts +++ b/src/table.ts @@ -15,25 +15,39 @@ */ import * as common from '@google-cloud/common'; -import {paginator} from '@google-cloud/paginator'; +import {paginator, ResourceStream} from '@google-cloud/paginator'; import {promisifyAll} from '@google-cloud/promisify'; -import * as arrify from 'arrify'; +import arrify = require('arrify'); import Big from 'big.js'; -import * as duplexify from 'duplexify'; import * as extend from 'extend'; - -const format = require('string-format-obj'); +import pEvent from 'p-event'; import * as fs from 'fs'; import * as is from 'is'; import * as path from 'path'; -import * as r from 'request'; import * as streamEvents from 'stream-events'; import * as uuid from 'uuid'; -import {BigQuery, Job, Dataset, Query, SimpleQueryRowsResponse, SimpleQueryRowsCallback} from '../src'; +import { + BigQuery, + Job, + Dataset, + Query, + SimpleQueryRowsResponse, + SimpleQueryRowsCallback, + ResourceCallback, + RequestCallback, + PagedResponse, + PagedCallback, + JobRequest, + PagedRequest, +} from '.'; import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; -import {Duplex, Readable, Writable} from 'stream'; -import {teenyRequest} from 'teeny-request'; +import {Duplex, Writable} from 'stream'; import {JobMetadata} from './job'; +import bigquery from './types'; +import {IntegerTypeCastOptions} from './bigquery'; + +// eslint-disable-next-line @typescript-eslint/no-var-requires +const duplexify = require('duplexify'); // This is supposed to be a @google-cloud/storage `File` type. The storage npm // module includes these types, but is current installed as a devDependency. @@ -41,7 +55,7 @@ import {JobMetadata} from './job'; // included. The storage module is fairly large, and only really needed for // types. We need to figure out how to include these types properly. 
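// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the patch: a real
// @google-cloud/storage File satisfies the structural `File` interface declared
// below, so it can be handed straight to the table load/extract helpers in this
// file. Bucket, object, dataset, and table names are hypothetical, and
// @google-cloud/storage must be installed separately.
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');

async function loadFromStorageSketch() {
  const bigquery = new BigQuery();
  const table = bigquery.dataset('my_dataset').table('my_table');
  const file = new Storage().bucket('my-bucket').file('data.csv');
  // Table#load accepts the storage File; the source format is inferred
  // from the ".csv" extension when none is given.
  const [metadata] = await table.load(file);
  console.log(metadata.status);
}
// ---------------------------------------------------------------------------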
export interface File { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any bucket: any; kmsKeyName?: string; userProject?: string; @@ -49,150 +63,94 @@ export interface File { generation?: number; } -export interface JobMetadataCallback { - (err: Error|null, metadataOrResponse: JobMetadata|r.Response): void; -} - +export type JobMetadataCallback = RequestCallback; export type JobMetadataResponse = [JobMetadata]; -// tslint:disable-next-line no-any +// eslint-disable-next-line @typescript-eslint/no-explicit-any export type RowMetadata = any; -export interface InsertRowsOptions { - autoCreate?: boolean; - ignoreUnknownValues?: boolean; +export type InsertRowsOptions = bigquery.ITableDataInsertAllRequest & { + createInsertId?: boolean; + partialRetries?: number; raw?: boolean; - schema?: string|{}; - skipInvalidRows?: boolean; - templateSuffix?: string; + schema?: string | {}; +}; + +export type InsertRowsResponse = [ + bigquery.ITableDataInsertAllResponse | bigquery.ITable +]; +export type InsertRowsCallback = RequestCallback< + bigquery.ITableDataInsertAllResponse | bigquery.ITable +>; + +export type RowsResponse = PagedResponse< + RowMetadata, + GetRowsOptions, + bigquery.ITableDataList | bigquery.ITable +>; +export type RowsCallback = PagedCallback< + RowMetadata, + GetRowsOptions, + bigquery.ITableDataList | bigquery.ITable +>; + +export interface InsertRow { + insertId?: string; + json?: bigquery.IJsonObject; } -export type RowsResponse = [RowMetadata[], GetRowsOptions, r.Response]; -export interface RowsCallback { - (err: Error|null, rows?: RowMetadata[]|null, nextQuery?: GetRowsOptions|null, - apiResponse?: r.Response): void; -} +export type TableRow = bigquery.ITableRow; +export type TableRowField = bigquery.ITableCell; +export type TableRowValue = string | TableRow; -export interface TableRow { - f: TableRowField[]; -} -export interface TableRowField { - v: string|TableRow|TableRowField[]; -} -export type TableRowValue = string|TableRow; - -export interface GetRowsOptions { - startIndex?: number; - selectedFields?: string; - autoPaginate?: boolean; - maxApiCalls?: number; - maxResults?: number; -} +export type GetRowsOptions = PagedRequest & { + wrapIntegers?: boolean | IntegerTypeCastOptions; +}; -export interface JobLoadMetadata { - jobId?: string; - jobPrefix?: string; - allowJaggedRows?: boolean; - allowQuotedNewlines?: boolean; - autodetect?: boolean; - clustering?: {fields: number[];}; - createDisposition?: string; - destinationEncryptionConfiguration?: {kmsKeyName?: string;}; - destinationTable?: {datasetId: string; projectId: string; tableId: string;}; - destinationTableProperties?: {description?: string; friendlyName?: string;}; - encoding?: string; - fieldDelimiter?: string; - ignoreUnknownValues?: boolean; - maxBadRecords?: number; - nullMarker?: string; - projectionFields?: string[]; - quote?: string; - schema?: {fields: TableField[]}; - schemaUpdateOptions?: string[]; - skipLeadingRows?: number; - sourceFormat?: string; +export type JobLoadMetadata = JobRequest & { format?: string; - location?: string; - sourceUris?: string[]; - timePartitioning?: { - expirationMs?: number; - field?: string; - requirePartitionFilter?: boolean; - type?: string; - }; - writeDisposition?: string; -} +}; -export interface CreateExtractJobOptions { - format?: 'CSV'|'JSON'|'AVRO'|'PARQUET'|'ORC'; +export type CreateExtractJobOptions = JobRequest< + bigquery.IJobConfigurationExtract +> & { + format?: 'CSV' | 'JSON' | 'AVRO' | 'PARQUET' | 
'ORC'; gzip?: boolean; - jobId?: string; - jobPrefix?: string; - destinationFormat?: string; - compression?: string; -} +}; -export type JobResponse = [Job, r.Response]; -export interface JobCallback { - (err: Error|null, job?: Job|null, apiResponse?: r.Response): void; -} +export type JobResponse = [Job, bigquery.IJob]; +export type JobCallback = ResourceCallback; -export interface CreateCopyJobMetadata extends CopyTableMetadata { - destinationTable?: {datasetId: string; projectId: string; tableId: string;}; - sourceTable?: {datasetId: string; projectId: string; tableId: string;}; - sourceTables: Array<{datasetId: string; projectId: string; tableId: string;}>; -} +export type CreateCopyJobMetadata = CopyTableMetadata; +export type SetTableMetadataOptions = TableMetadata; +export type CopyTableMetadata = JobRequest; -export interface SetTableMetadataOptions { - description?: string; - schema?: string|{}; -} - -export interface CopyTableMetadata { - jobId?: string; - jobPrefix?: string; - createDisposition?: 'CREATE_IF_NEEDED'|'CREATE_NEVER'; - writeDisposition?: 'WRITE_TRUNCATE'|'WRITE_APPEND'|'WRITE_EMPTY'; - destinationEncryptionConfiguration?: {kmsKeyName?: string;}; -} - -export interface TableMetadata { - name?: string; - friendlyName: string; - schema?: string|TableField[]; - partitioning?: string; - view?: string|ViewDefinition; -} - -export interface ViewDefinition { - query: string; - useLegacySql?: boolean; -} - -export interface FormattedMetadata { - schema?: TableSchema; - friendlyName: string; +export type TableMetadata = bigquery.ITable & { name?: string; + schema?: string | TableField[] | TableSchema; partitioning?: string; - timePartitioning?: {type: string}; - view: ViewDefinition; + view?: string | ViewDefinition; +}; + +export type ViewDefinition = bigquery.IViewDefinition; +export type FormattedMetadata = bigquery.ITable; +export type TableSchema = bigquery.ITableSchema; +export type TableField = bigquery.ITableFieldSchema; + +export interface PartialInsertFailure { + message: string; + reason: string; + row: RowMetadata; } -export interface TableSchema { - fields: TableField[]; -} - -export interface TableField { - name: string; - type: string; - mode?: string; - fields?: TableField[]; -} - -export type ApiResponse = [r.Response]; -export interface ApiResponseCallback { - (err: Error|null, apiResponse?: r.Response): void; -} +export type Policy = bigquery.IPolicy; +export type GetPolicyOptions = bigquery.IGetPolicyOptions; +export type SetPolicyOptions = Omit; +export type PolicyRequest = bigquery.IGetIamPolicyRequest; +export type PolicyResponse = [Policy]; +export type PolicyCallback = RequestCallback; +export type PermissionsResponse = [bigquery.ITestIamPermissionsResponse]; +export type PermissionsCallback = RequestCallback; /** * The file formats accepted by BigQuery. 
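// ---------------------------------------------------------------------------
// [Editor's note] Short sketch, not part of the patch: exercising a few of the
// table option types declared above. The dataset/table ids and row payloads
// are hypothetical.
const {BigQuery} = require('@google-cloud/bigquery');

async function tableOptionsSketch() {
  const bigquery = new BigQuery();
  const table = bigquery.dataset('my_dataset').table('my_table');

  // InsertRowsOptions: this patch adds `createInsertId` and `partialRetries`
  // alongside the standard insertAll request fields such as `skipInvalidRows`.
  await table.insert([{name: 'alice'}, {name: 'bob'}], {
    createInsertId: true,
    partialRetries: 3,
    skipInvalidRows: true,
  });

  // GetRowsOptions: `wrapIntegers` now controls INT64 handling on reads too.
  const [rows] = await table.getRows({maxResults: 10, wrapIntegers: true});
  console.log(rows);
}
// ---------------------------------------------------------------------------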
@@ -239,7 +197,7 @@ class Table extends common.ServiceObject { dataset: Dataset; bigQuery: BigQuery; location?: string; - createReadStream: (options?: GetRowsOptions) => Readable; + createReadStream: (options?: GetRowsOptions) => ResourceStream; constructor(dataset: Dataset, id: string, options?: TableOptions) { const methods = { /** @@ -414,7 +372,6 @@ class Table extends common.ServiceObject { id, createMethod: dataset.createTable.bind(dataset), methods, - requestModule: teenyRequest as typeof r, }); if (options && options.location) { @@ -466,7 +423,7 @@ class Table extends common.ServiceObject { * this.end(); * }); */ - this.createReadStream = paginator.streamify('getRows'); + this.createReadStream = paginator.streamify('getRows'); } /** @@ -480,16 +437,17 @@ class Table extends common.ServiceObject { */ static createSchemaFromString_(str: string): TableSchema { return str.split(/\s*,\s*/).reduce( - (acc: {fields: Array<{name: string, type: string}>}, pair) => { - acc.fields.push({ - name: pair.split(':')[0].trim(), - type: (pair.split(':')[1] || 'STRING').toUpperCase().trim(), - }); - return acc; - }, - { - fields: [], + (acc: {fields: Array<{name: string; type: string}>}, pair) => { + acc.fields.push({ + name: pair.split(':')[0].trim(), + type: (pair.split(':')[1] || 'STRING').toUpperCase().trim(), }); + return acc; + }, + { + fields: [], + } + ); } /** @@ -502,7 +460,7 @@ class Table extends common.ServiceObject { * @param {*} value The value to be converted. * @returns {*} The converted value. */ - static encodeValue_(value?: {}|null): {}|null { + static encodeValue_(value?: {} | null): {} | null { if (typeof value === 'undefined' || value === null) { return null; } @@ -518,12 +476,14 @@ class Table extends common.ServiceObject { const customTypeConstructorNames = [ 'BigQueryDate', 'BigQueryDatetime', + 'BigQueryInt', 'BigQueryTime', 'BigQueryTimestamp', + 'Geography', ]; const constructorName = value.constructor.name; const isCustomType = - customTypeConstructorNames.indexOf(constructorName) > -1; + customTypeConstructorNames.indexOf(constructorName) > -1; if (isCustomType) { return (value as {value: {}}).value; @@ -539,12 +499,14 @@ class Table extends common.ServiceObject { if (typeof value === 'object') { return Object.keys(value).reduce( - (acc: {[index: string]: {}|null}, key) => { - acc[key] = Table.encodeValue_( - (value as {[index: string]: {} | null})[key]); - return acc; - }, - {}); + (acc: {[index: string]: {} | null}, key) => { + acc[key] = Table.encodeValue_( + (value as {[index: string]: {} | null})[key] + ); + return acc; + }, + {} + ); } return value; } @@ -553,11 +515,11 @@ class Table extends common.ServiceObject { * @private */ static formatMetadata_(options: TableMetadata): FormattedMetadata { - const body = extend(true, {}, options) as {} as FormattedMetadata; + const body = (extend(true, {}, options) as {}) as FormattedMetadata; if (options.name) { body.friendlyName = options.name; - delete body.name; + delete (body as TableMetadata).name; } if (is.string(options.schema)) { @@ -571,7 +533,7 @@ class Table extends common.ServiceObject { } if (body.schema && body.schema.fields) { - body.schema.fields = body.schema.fields.map((field) => { + body.schema.fields = body.schema.fields.map(field => { if (field.fields) { field.type = 'RECORD'; } @@ -581,8 +543,9 @@ class Table extends common.ServiceObject { if (is.string(options.partitioning)) { body.timePartitioning = { - type: body.partitioning!.toUpperCase(), + type: options.partitioning!.toUpperCase(), }; + delete 
(body as TableMetadata).partitioning; } if (is.string(options.view)) { @@ -595,20 +558,25 @@ class Table extends common.ServiceObject { return body; } - copy(destination: Table, metadata?: CopyTableMetadata): - Promise; copy( - destination: Table, metadata: CopyTableMetadata, - callback: JobMetadataCallback): void; + destination: Table, + metadata?: CopyTableMetadata + ): Promise; + copy( + destination: Table, + metadata: CopyTableMetadata, + callback: JobMetadataCallback + ): void; copy(destination: Table, callback: JobMetadataCallback): void; /** * Copy data from one table to another, optionally creating that table. * * @param {Table} destination The destination table. * @param {object} [metadata] Metadata to set with the copy operation. The - * metadata object should be in the format of the - * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs - * resource. + * metadata object should be in the format of a + * [`JobConfigurationTableCopy`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy) + * object. + * object. * @param {string} [metadata.jobId] Custom id for the underlying job. * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job * id. @@ -630,9 +598,8 @@ class Table extends common.ServiceObject { * table.copy(yourTable, (err, apiResponse) => {}); * * //- - * // See the `configuration.copy` object - * for - * // all available options. + * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy + * // for all available options. * //- * const metadata = { * createDisposition: 'CREATE_NEVER', @@ -649,42 +616,49 @@ class Table extends common.ServiceObject { * }); */ copy( - destination: Table, - metadataOrCallback?: CopyTableMetadata|JobMetadataCallback, - cb?: JobMetadataCallback): void|Promise { + destination: Table, + metadataOrCallback?: CopyTableMetadata | JobMetadataCallback, + cb?: JobMetadataCallback + ): void | Promise { const metadata = - typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; + typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; const callback = - typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; + typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; this.createCopyJob( - destination, metadata as CreateCopyJobMetadata, (err, job, resp) => { - if (err) { - callback!(err, resp); - return; - } - - job!.on('error', callback!) - .on('complete', (metadata: JobMetadata) => { - callback!(null, metadata); - }); + destination, + metadata as CreateCopyJobMetadata, + (err, job, resp) => { + if (err) { + callback!(err, resp); + return; + } + + job!.on('error', callback!).on('complete', (metadata: JobMetadata) => { + callback!(null, metadata); }); + } + ); } - copyFrom(sourceTables: Table|Table[], metadata?: CopyTableMetadata): - Promise; copyFrom( - sourceTables: Table|Table[], metadata: CopyTableMetadata, - callback: JobMetadataCallback): void; - copyFrom(sourceTables: Table|Table[], callback: JobMetadataCallback): void; + sourceTables: Table | Table[], + metadata?: CopyTableMetadata + ): Promise; + copyFrom( + sourceTables: Table | Table[], + metadata: CopyTableMetadata, + callback: JobMetadataCallback + ): void; + copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void; /** * Copy data from multiple tables into this table. * * @param {Table|Table[]} sourceTables The * source table(s) to copy data from. * @param {object=} metadata Metadata to set with the copy operation. 
The - * metadata object should be in the format of the - * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs - * resource. + * metadata object should be in the format of a + * [`JobConfigurationTableCopy`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy) + * object. * @param {string} [metadata.jobId] Custom id for the underlying job. * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job * id. @@ -709,9 +683,8 @@ class Table extends common.ServiceObject { * table.copyFrom(sourceTables, (err, apiResponse) => {}); * * //- - * // See the `configuration.copy` object - * for - * // all available options. + * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy + * // for all available options. * //- * const metadata = { * createDisposition: 'CREATE_NEVER', @@ -728,29 +701,34 @@ class Table extends common.ServiceObject { * }); */ copyFrom( - sourceTables: Table|Table[], - metadataOrCallback?: CopyTableMetadata|JobMetadataCallback, - cb?: JobMetadataCallback): void|Promise { + sourceTables: Table | Table[], + metadataOrCallback?: CopyTableMetadata | JobMetadataCallback, + cb?: JobMetadataCallback + ): void | Promise { const metadata = - typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; + typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; const callback = - typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; + typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; this.createCopyFromJob(sourceTables, metadata, (err, job, resp) => { if (err) { callback!(err, resp); return; } - job!.on('error', callback!).on('complete', (metadata) => { + job!.on('error', callback!).on('complete', metadata => { callback!(null, metadata); }); }); } - createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): - Promise; createCopyJob( - destination: Table, metadata: CreateCopyJobMetadata, - callback: JobCallback): void; + destination: Table, + metadata?: CreateCopyJobMetadata + ): Promise; + createCopyJob( + destination: Table, + metadata: CreateCopyJobMetadata, + callback: JobCallback + ): void; createCopyJob(destination: Table, callback: JobCallback): void; /** * Copy data from one table to another, optionally creating that table. @@ -759,9 +737,9 @@ class Table extends common.ServiceObject { * * @param {Table} destination The destination table. * @param {object} [metadata] Metadata to set with the copy operation. The - * metadata object should be in the format of the - * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs - * resource. + * metadata object should be in the format of a + * [`JobConfigurationTableCopy`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy) + * object. * @param {string} [metadata.jobId] Custom job id. * @param {string} [metadata.jobPrefix] Prefix to apply to the job id. * @param {function} [callback] The callback function. @@ -785,9 +763,8 @@ class Table extends common.ServiceObject { * }); * * //- - * // See the `configuration.copy` object - * for - * // all available options. + * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy + * // for all available options. 
* //- * const metadata = { * createDisposition: 'CREATE_NEVER', @@ -805,19 +782,21 @@ class Table extends common.ServiceObject { * }); */ createCopyJob( - destination: Table, - metadataOrCallback?: CreateCopyJobMetadata|JobCallback, - cb?: JobCallback): void|Promise { + destination: Table, + metadataOrCallback?: CreateCopyJobMetadata | JobCallback, + cb?: JobCallback + ): void | Promise { if (!(destination instanceof Table)) { throw new Error('Destination must be a Table object.'); } - const metadata = typeof metadataOrCallback === 'object' ? - metadataOrCallback : - {} as CreateCopyJobMetadata; + const metadata = + typeof metadataOrCallback === 'object' + ? metadataOrCallback + : ({} as CreateCopyJobMetadata); const callback = - typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; + typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any const body: any = { configuration: { copy: extend(true, metadata, { @@ -852,12 +831,16 @@ class Table extends common.ServiceObject { this.bigQuery.createJob(body, callback!); } - createCopyFromJob(source: Table|Table[], metadata?: CopyTableMetadata): - Promise; createCopyFromJob( - source: Table|Table[], metadata: CopyTableMetadata, - callback: JobCallback): void; - createCopyFromJob(source: Table|Table[], callback: JobCallback): void; + source: Table | Table[], + metadata?: CopyTableMetadata + ): Promise; + createCopyFromJob( + source: Table | Table[], + metadata: CopyTableMetadata, + callback: JobCallback + ): void; + createCopyFromJob(source: Table | Table[], callback: JobCallback): void; /** * Copy data from multiple tables into this table. * @@ -866,9 +849,9 @@ class Table extends common.ServiceObject { * @param {Table|Table[]} sourceTables The * source table(s) to copy data from. * @param {object} [metadata] Metadata to set with the copy operation. The - * metadata object should be in the format of the - * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs - * resource. + * metadata object should be in the format of a + * [`JobConfigurationTableCopy`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy) + * object. * @param {string} [metadata.jobId] Custom job id. * @param {string} [metadata.jobPrefix] Prefix to apply to the job id. * @param {function} [callback] The callback function. @@ -898,9 +881,8 @@ class Table extends common.ServiceObject { * table.createCopyFromJob(sourceTables, callback); * * //- - * // See the `configuration.copy` object - * for - * // all available options. + * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy + * // for all available options. * //- * const metadata = { * createDisposition: 'CREATE_NEVER', @@ -918,21 +900,23 @@ class Table extends common.ServiceObject { * }); */ createCopyFromJob( - source: Table|Table[], metadataOrCallback?: CopyTableMetadata|JobCallback, - cb?: JobCallback): void|Promise { - const sourceTables = arrify(source); - sourceTables.forEach((sourceTable) => { + source: Table | Table[], + metadataOrCallback?: CopyTableMetadata | JobCallback, + cb?: JobCallback + ): void | Promise { + const sourceTables = arrify(source) as Table[]; + sourceTables.forEach(sourceTable => { if (!(sourceTable instanceof Table)) { throw new Error('Source must be a Table object.'); } }); const metadata = - typeof metadataOrCallback === 'object' ? 
metadataOrCallback : {}; + typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; const callback = - typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; + typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any const body: any = { configuration: { copy: extend(true, metadata, { @@ -942,7 +926,7 @@ class Table extends common.ServiceObject { tableId: this.id, }, - sourceTables: sourceTables.map((sourceTable) => { + sourceTables: sourceTables.map(sourceTable => { return { datasetId: sourceTable.dataset.id, projectId: sourceTable.bigQuery.projectId, @@ -970,11 +954,15 @@ class Table extends common.ServiceObject { this.bigQuery.createJob(body, callback!); } - createExtractJob(destination: File, options?: CreateExtractJobOptions): - Promise; createExtractJob( - destination: File, options: CreateExtractJobOptions, - callback: JobCallback): void; + destination: File, + options?: CreateExtractJobOptions + ): Promise; + createExtractJob( + destination: File, + options: CreateExtractJobOptions, + callback: JobCallback + ): void; createExtractJob(destination: File, callback: JobCallback): void; /** * Export table to Cloud Storage. @@ -983,7 +971,7 @@ class Table extends common.ServiceObject { * * @param {string|File} destination Where the file should be exported * to. A string or a {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} + * https://googleapis.dev/nodejs/storage/latest/File.html File} * object. * @param {object=} options - The configuration object. * @param {string} options.format - The format to export the data in. Allowed @@ -1019,7 +1007,7 @@ class Table extends common.ServiceObject { * * //- * // To use the default options, just pass a {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} + * https://googleapis.dev/nodejs/storage/latest/File.html File} * object. * // * // Note: The exported format type will be inferred by the file's extension. @@ -1055,16 +1043,17 @@ class Table extends common.ServiceObject { * }); */ createExtractJob( - destination: File, - optionsOrCallback?: CreateExtractJobOptions|JobCallback, - cb?: JobCallback): void|Promise { + destination: File, + optionsOrCallback?: CreateExtractJobOptions | JobCallback, + cb?: JobCallback + ): void | Promise { let options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; options = extend(true, options, { - destinationUris: arrify(destination).map((dest) => { + destinationUris: arrify(destination).map(dest => { if (!common.util.isCustomType(dest, 'storage/file')) { throw new Error('Destination must be a File object.'); } @@ -1072,7 +1061,10 @@ class Table extends common.ServiceObject { // If no explicit format was provided, attempt to find a match from the // file's extension. If no match, don't set, and default upstream to // CSV. 
- const format = path.extname(dest.name).substr(1).toLowerCase(); + const format = path + .extname(dest.name) + .substr(1) + .toLowerCase(); if (!options.destinationFormat && !options.format && FORMATS[format]) { options.destinationFormat = FORMATS[format]; } @@ -1097,7 +1089,7 @@ class Table extends common.ServiceObject { delete options.gzip; } - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any const body: any = { configuration: { extract: extend(true, options, { @@ -1127,18 +1119,19 @@ class Table extends common.ServiceObject { this.bigQuery.createJob(body, callback!); } - createLoadJob(source: string, metadata?: JobLoadMetadata): Writable; - createLoadJob(source: File, metadata?: JobLoadMetadata): Promise; createLoadJob( - source: string, metadata: JobLoadMetadata, - callback: JobCallback): Writable; - createLoadJob(source: File, metadata: JobLoadMetadata, callback: JobCallback): - void; - createLoadJob(source: string, callback: JobCallback): Writable; - createLoadJob(source: File, callback: JobCallback): void; + source: string | File, + metadata?: JobLoadMetadata + ): Promise; + createLoadJob( + source: string | File, + metadata: JobLoadMetadata, + callback: JobCallback + ): void; + createLoadJob(source: string | File, callback: JobCallback): void; /** * Load data from a local file or Storage {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}. + * https://googleapis.dev/nodejs/storage/latest/File.html File}. * * By loading data this way, you create a load job that will run your data * load asynchronously. If you would like instantaneous access to your data, @@ -1149,14 +1142,14 @@ class Table extends common.ServiceObject { * * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} * - * @param {string|File} source The source file to load. A string or a - * {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} - * object. + * @param {string|File|File[]} source The source file to load. A string (path) + * to a local file, or one or more {@link + * https://googleapis.dev/nodejs/storage/latest/File.html File} + * objects. * @param {object} [metadata] Metadata to set with the load operation. The * metadata object should be in the format of the - * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs - * resource. + * [`configuration.load`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) + * property of a Jobs resource. * @param {string} [metadata.format] The format the data being loaded is in. * Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET". * @param {string} [metadata.jobId] Custom job id. @@ -1188,7 +1181,8 @@ class Table extends common.ServiceObject { * * //- * // You may also pass in metadata in the format of a Jobs resource. See - * // (http://goo.gl/BVcXk4) for a full list of supported values. + * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) + * // for a full list of supported values. 
* //- * const metadata = { * encoding: 'ISO-8859-1', @@ -1223,14 +1217,31 @@ class Table extends common.ServiceObject { * }); */ createLoadJob( - source: string|File, metadataOrCallback?: JobLoadMetadata|JobCallback, - cb?: JobCallback): void|Promise|Writable { + source: string | File | File[], + metadataOrCallback?: JobLoadMetadata | JobCallback, + cb?: JobCallback + ): void | Promise { const metadata = - typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; - const callback = typeof metadataOrCallback === 'function' ? - metadataOrCallback : - cb || common.util.noop; + typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; + const callback = + typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; + this._createLoadJob(source, metadata).then( + ([resp]) => callback!(null, resp, resp.metadata), + err => callback!(err) + ); + } + + /** + * @param {string | File | File[]} source + * @param {JobLoadMetadata} metadata + * @returns {Promise} + * @private + */ + async _createLoadJob( + source: string | File | File[], + metadata: JobLoadMetadata + ): Promise { if (metadata.format) { metadata.sourceFormat = FORMATS[metadata.format.toLowerCase()]; delete metadata.format; @@ -1244,21 +1255,25 @@ class Table extends common.ServiceObject { // A path to a file was given. If a sourceFormat wasn't specified, try to // find a match from the file's extension. const detectedFormat = - FORMATS[path.extname(source).substr(1).toLowerCase()]; + FORMATS[ + path + .extname(source) + .substr(1) + .toLowerCase() + ]; if (!metadata.sourceFormat && detectedFormat) { metadata.sourceFormat = detectedFormat; } // Read the file into a new write stream. - return fs.createReadStream(source) - .pipe(this.createWriteStream_(metadata)) - .on('error', callback) - .on('job', (job) => { - callback(null, job, job.metadata); - }); + const jobWritable = fs + .createReadStream(source) + .pipe(this.createWriteStream_(metadata)); + const jobResponse = (await pEvent(jobWritable, 'job')) as Job; + return [jobResponse, jobResponse.metadata]; } - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any const body: any = { configuration: { load: { @@ -1287,7 +1302,7 @@ class Table extends common.ServiceObject { } extend(true, body.configuration.load, metadata, { - sourceUris: arrify(source).map((src) => { + sourceUris: arrify(source).map(src => { if (!common.util.isCustomType(src, 'storage/file')) { throw new Error('Source must be a File object.'); } @@ -1295,7 +1310,13 @@ class Table extends common.ServiceObject { // If no explicit format was provided, attempt to find a match from // the file's extension. If no match, don't set, and default upstream // to CSV. - const format = FORMATS[path.extname(src.name).substr(1).toLowerCase()]; + const format = + FORMATS[ + path + .extname(src.name) + .substr(1) + .toLowerCase() + ]; if (!metadata.sourceFormat && format) { body.configuration.load.sourceFormat = format; } @@ -1303,7 +1324,7 @@ class Table extends common.ServiceObject { }), }); - this.bigQuery.createJob(body, callback); + return this.bigQuery.createJob(body); } createQueryJob(options: Query): Promise; @@ -1316,8 +1337,10 @@ class Table extends common.ServiceObject { * * See {@link BigQuery#createQueryJob} for full documentation of this method. 
*/ - createQueryJob(options: Query, callback?: JobCallback): - void|Promise { + createQueryJob( + options: Query, + callback?: JobCallback + ): void | Promise { return this.dataset.createQueryJob(options, callback!); } @@ -1344,18 +1367,15 @@ class Table extends common.ServiceObject { * * @param {string|object} [metadata] Metadata to set with the load operation. * The metadata object should be in the format of the - * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs - * resource. If a string is given, it will be used as the filetype. + * [`configuration.load`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) + * property of a Jobs resource. If a string is given, it will be used + * as the filetype. * @param {string} [metadata.jobId] Custom job id. * @param {string} [metadata.jobPrefix] Prefix to apply to the job id. * @returns {WritableStream} */ - createWriteStream_(metadata: JobLoadMetadata|string): Writable { + createWriteStream_(metadata: JobLoadMetadata | string): Writable { metadata = metadata || {}; - const fileTypes = Object.keys(FORMATS).map((key) => { - return FORMATS[key]; - }); - if (typeof metadata === 'string') { metadata = { sourceFormat: FORMATS[metadata.toLowerCase()], @@ -1366,13 +1386,17 @@ class Table extends common.ServiceObject { metadata.schema = Table.createSchemaFromString_(metadata.schema); } - extend(true, metadata, { - destinationTable: { - projectId: this.bigQuery.projectId, - datasetId: this.dataset.id, - tableId: this.id, + metadata = extend( + true, + { + destinationTable: { + projectId: this.bigQuery.projectId, + datasetId: this.dataset.id, + tableId: this.id, + }, }, - }); + metadata + ); let jobId = metadata.jobId || uuid.v4(); @@ -1385,43 +1409,36 @@ class Table extends common.ServiceObject { delete metadata.jobPrefix; } - if (metadata.hasOwnProperty('sourceFormat') && - fileTypes.indexOf(metadata.sourceFormat!) 
< 0) { - throw new Error(`Source format not recognized: ${metadata.sourceFormat}`); - } - - const dup = streamEvents(duplexify()) as duplexify.Duplexify; + const dup = streamEvents(duplexify()); dup.once('writing', () => { common.util.makeWritableStream( - dup, { - makeAuthenticatedRequest: this.bigQuery.makeAuthenticatedRequest, - requestModule: teenyRequest as typeof r, - metadata: { - configuration: { - load: metadata, - }, - jobReference: { - jobId, - projectId: this.bigQuery.projectId, - location: this.location, - }, - } as {}, - request: { - uri: format('{base}/{projectId}/jobs', { - base: 'https://www.googleapis.com/upload/bigquery/v2/projects', - projectId: this.bigQuery.projectId, - }), + dup, + { + makeAuthenticatedRequest: this.bigQuery.makeAuthenticatedRequest, + metadata: { + configuration: { + load: metadata, + }, + jobReference: { + jobId, + projectId: this.bigQuery.projectId, + location: this.location, }, + } as {}, + request: { + uri: `${this.bigQuery.apiEndpoint}/upload/bigquery/v2/projects/${this.bigQuery.projectId}/jobs`, }, - // tslint:disable-next-line no-any - (data: any) => { - const job = this.bigQuery.job(data.jobReference.jobId, { - location: data.jobReference.location, - }); - job.metadata = data; - dup.emit('job', job); + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (data: any) => { + const job = this.bigQuery.job(data.jobReference.jobId, { + location: data.jobReference.location, }); + job.metadata = data; + dup.emit('job', job); + } + ); }); return dup; } @@ -1434,8 +1451,9 @@ class Table extends common.ServiceObject { * * @param {string|object} [metadata] Metadata to set with the load operation. * The metadata object should be in the format of the - * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs - * resource. If a string is given, it will be used as the filetype. + * [`configuration.load`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) + * property of a Jobs resource. If a string is given, + * it will be used as the filetype. * @param {string} [metadata.jobId] Custom job id. * @param {string} [metadata.jobPrefix] Prefix to apply to the job id. * @returns {WritableStream} @@ -1485,34 +1503,40 @@ class Table extends common.ServiceObject { * // The job has completed successfully. * }); */ - createWriteStream(metadata: JobLoadMetadata|string) { + createWriteStream(metadata: JobLoadMetadata | string) { const stream = this.createWriteStream_(metadata); stream.on('prefinish', () => { stream.cork(); }); stream.on('job', (job: Job) => { - job.on('error', err => { - stream.destroy(err); - }).on('complete', () => { - stream.emit('complete', job); - stream.uncork(); - }); + job + .on('error', err => { + stream.destroy(err); + }) + .on('complete', () => { + stream.emit('complete', job); + stream.uncork(); + }); }); return stream; } - extract(destination: File, options?: CreateExtractJobOptions): - Promise; extract( - destination: File, options: CreateExtractJobOptions, - callback?: JobMetadataCallback): void; + destination: File, + options?: CreateExtractJobOptions + ): Promise; + extract( + destination: File, + options: CreateExtractJobOptions, + callback?: JobMetadataCallback + ): void; extract(destination: File, callback?: JobMetadataCallback): void; /** * Export table to Cloud Storage. * * @param {string|File} destination Where the file should be exported * to. A string or a {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}. 
+ * https://googleapis.dev/nodejs/storage/latest/File.html File}. * @param {object} [options] The configuration object. * @param {string} [options.format="CSV"] The format to export the data in. * Allowed options are "AVRO", "CSV", "JSON", "ORC" or "PARQUET". @@ -1542,7 +1566,7 @@ class Table extends common.ServiceObject { * * //- * // To use the default options, just pass a {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} + * https://googleapis.dev/nodejs/storage/latest/File.html File} * object. * // * // Note: The exported format type will be inferred by the file's extension. @@ -1577,19 +1601,20 @@ class Table extends common.ServiceObject { * }); */ extract( - destination: File, - optionsOrCallback?: CreateExtractJobOptions|JobMetadataCallback, - cb?: JobMetadataCallback): void|Promise { + destination: File, + optionsOrCallback?: CreateExtractJobOptions | JobMetadataCallback, + cb?: JobMetadataCallback + ): void | Promise { const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; this.createExtractJob(destination, options, (err, job, resp) => { if (err) { callback!(err, resp); return; } - job!.on('error', callback!).on('complete', (metadata) => { + job!.on('error', callback!).on('complete', metadata => { callback!(null, metadata); }); }); @@ -1609,6 +1634,11 @@ class Table extends common.ServiceObject { * automatically. * @param {number} [options.maxApiCalls] Maximum number of API calls to make. * @param {number} [options.maxResults] Maximum number of results to return. + * @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers=false] Wrap values + * of 'INT64' type in {@link BigQueryInt} objects. + * If a `boolean`, this will wrap values in {@link BigQueryInt} objects. + * If an `object`, this will return a value returned by + * `wrapIntegers.integerTypeCastFunction`. * @param {function} [callback] The callback function. * @param {?error} callback.err An error returned while making this request * @param {array} callback.rows The table data from specified set of rows. @@ -1648,88 +1678,116 @@ class Table extends common.ServiceObject { * const rows = data[0]; * }); */ - getRows(optionsOrCallback?: GetRowsOptions|RowsCallback, cb?: RowsCallback): - void|Promise { + getRows( + optionsOrCallback?: GetRowsOptions | RowsCallback, + cb?: RowsCallback + ): void | Promise { const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - const onComplete = - (err: Error|null, rows: TableRow[]|null, nextQuery: GetRowsOptions|null, - resp: r.Response) => { - if (err) { - callback!(err, null, null, resp); - return; - } - rows = - BigQuery.mergeSchemaWithRows_(this.metadata.schema, rows || []); - callback!(null, rows, nextQuery, resp); - }; + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + const wrapIntegers = options.wrapIntegers ? 
options.wrapIntegers : false; + delete options.wrapIntegers; + const onComplete = ( + err: Error | null, + rows: TableRow[] | null, + nextQuery: GetRowsOptions | null, + resp: bigquery.ITableList + ) => { + if (err) { + callback!(err, null, null, resp); + return; + } + rows = BigQuery.mergeSchemaWithRows_( + this.metadata.schema, + rows || [], + wrapIntegers, + options.selectedFields ? options.selectedFields!.split(',') : [] + ); + callback!(null, rows, nextQuery, resp); + }; this.request( - { - uri: '/data', - qs: options, - }, - (err, resp) => { - if (err) { - onComplete(err, null, null, resp); - return; - } - let nextQuery: GetRowsOptions|null = null; - if (resp.pageToken) { - nextQuery = extend({}, options, { - pageToken: resp.pageToken, - }); - } - - if (resp.rows && resp.rows.length > 0 && !this.metadata.schema) { - // We don't know the schema for this table yet. Do a quick stat. - this.getMetadata( - (err: Error, metadata: common.Metadata, - apiResponse: r.Response) => { - if (err) { - onComplete(err, null, null, apiResponse!); - return; - } - onComplete(null, resp.rows, nextQuery, resp); - }); - return; - } - - onComplete(null, resp.rows, nextQuery, resp); - }); + { + uri: '/data', + qs: options, + }, + (err, resp) => { + if (err) { + onComplete(err, null, null, resp); + return; + } + let nextQuery: GetRowsOptions | null = null; + if (resp.pageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.pageToken, + }); + } + + if (resp.rows && resp.rows.length > 0 && !this.metadata.schema) { + // We don't know the schema for this table yet. Do a quick stat. + this.getMetadata( + ( + err: Error, + metadata: common.Metadata, + apiResponse: bigquery.ITable + ) => { + if (err) { + onComplete(err, null, null, apiResponse!); + return; + } + onComplete(null, resp.rows, nextQuery, resp); + } + ); + return; + } + + onComplete(null, resp.rows, nextQuery, resp); + } + ); } - insert(rows: RowMetadata|RowMetadata[], options?: InsertRowsOptions): - Promise; insert( - rows: RowMetadata|RowMetadata[], options: InsertRowsOptions, - callback: ApiResponseCallback): void; - insert(rows: RowMetadata|RowMetadata[], callback: ApiResponseCallback): void; + rows: RowMetadata | RowMetadata[], + options?: InsertRowsOptions + ): Promise; + insert( + rows: RowMetadata | RowMetadata[], + options: InsertRowsOptions, + callback: InsertRowsCallback + ): void; + insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void; /** * Stream data into BigQuery one record at a time without running a load job. * * If you need to create an entire table from a file, consider using * {@link Table#load} instead. * + * Note, if a table was recently created, inserts may fail until the table + * is consistent within BigQuery. If a `schema` is supplied, this method will + * automatically retry those failed inserts, and it will even create the + * table with the provided schema if it does not exist. + * * @see [Tabledata: insertAll API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll} * @see [Streaming Insert Limits]{@link https://cloud.google.com/bigquery/quotas#streaming_inserts} * @see [Troubleshooting Errors]{@link https://developers.google.com/bigquery/troubleshooting-errors} * * @param {object|object[]} rows The rows to insert into the table. * @param {object} [options] Configuration object. - * @param {boolean} [options.autoCreate] Automatically create the table if it - * doesn't already exist. 
In order for this to succeed the `schema` option - * must also be set. Note that this can take longer than 2 minutes to - * complete. + * @param {boolean} [options.createInsertId=true] Automatically insert a + * default row id when one is not provided. * @param {boolean} [options.ignoreUnknownValues=false] Accept rows that contain * values that do not match the schema. The unknown values are ignored. + * @param {number} [options.partialRetries=3] Number of times to retry + * inserting rows for cases of partial failures. * @param {boolean} [options.raw] If `true`, the `rows` argument is expected to * be formatted as according to the * [specification](https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll). - * @param {string|object} [options.schema] A comma-separated list of name:type - * pairs. Valid types are "string", "integer", "float", "boolean", and + * @param {string|object} [options.schema] If provided will automatically + * create a table if it doesn't already exist. Note that this can take + * longer than 2 minutes to complete. A comma-separated list of + * name:type pairs. + * Valid types are "string", "integer", "float", "boolean", and * "timestamp". If the type is omitted, it is assumed to be "string". * Example: "name:string, age:integer". Schemas can also be specified as a * JSON array of fields, which allows for nested and repeated fields. See @@ -1781,9 +1839,7 @@ class Table extends common.ServiceObject { * table.insert(rows, insertHandler); * * //- - * // Insert a row as according to the - * // specification. + * // Insert a row as according to the specification. * //- * const row = { * insertId: '1', @@ -1801,9 +1857,7 @@ class Table extends common.ServiceObject { * table.insert(row, options, insertHandler); * * //- - * // Handling the response. See - * // Troubleshooting Errors for best practices on how to handle errors. + * // Handling the response. See Troubleshooting Errors for best practices on how to handle errors. * //- * function insertHandler(err, apiResponse) { * if (err) { @@ -1841,117 +1895,189 @@ class Table extends common.ServiceObject { * }); */ insert( - rows: RowMetadata|RowMetadata[], - optionsOrCallback?: InsertRowsOptions|ApiResponseCallback, - cb?: ApiResponseCallback): void|Promise { + rows: RowMetadata | RowMetadata[], + optionsOrCallback?: InsertRowsOptions | InsertRowsCallback, + cb?: InsertRowsCallback + ): void | Promise { const options = - typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + typeof optionsOrCallback === 'object' + ? optionsOrCallback + : ({} as InsertRowsOptions); const callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + const promise = this._insertAndCreateTable(rows, options); + if (callback) { + promise.then( + resp => callback(null, resp), + err => callback(err, null) + ); + } else { + return promise.then(r => [r]); + } + } - rows = arrify(rows); + /** + * Insert rows with retries, but will create the table if not exists. 
+ * + * @param {RowMetadata | RowMetadata[]} rows + * @param {InsertRowsOptions} options + * @returns {Promise} + * @private + */ + private async _insertAndCreateTable( + rows: RowMetadata | RowMetadata[], + options: InsertRowsOptions + ): Promise { + const {schema} = options; + const delay = 60000; + + try { + return await this._insertWithRetry(rows, options); + } catch (err) { + if ((err as common.ApiError).code !== 404 || !schema) { + throw err; + } + } + + try { + await this.create({schema}); + } catch (err) { + if ((err as common.ApiError).code !== 409) { + throw err; + } + } + + // table creation after failed access is subject to failure caching and + // eventual consistency, see: + // https://github.com/googleapis/google-cloud-python/issues/4553#issuecomment-350110292 + await new Promise(resolve => setTimeout(resolve, delay)); + return this._insertAndCreateTable(rows, options); + } + + /** + * This method will attempt to insert rows while retrying any partial failures + * that occur along the way. Because partial insert failures are returned + * differently, we can't depend on our usual retry strategy. + * + * @private + * + * @param {RowMetadata|RowMetadata[]} rows The rows to insert. + * @param {InsertRowsOptions} options Insert options. + * @returns {Promise} + */ + private async _insertWithRetry( + rows: RowMetadata | RowMetadata[], + options: InsertRowsOptions + ): Promise { + const {partialRetries = 3} = options; + let error: Error; + + const maxAttempts = Math.max(partialRetries, 0) + 1; + + for (let attempts = 0; attempts < maxAttempts; attempts++) { + try { + return await this._insert(rows, options); + } catch (e) { + error = e; + rows = ((e.errors || []) as PartialInsertFailure[]) + .filter(err => !!err.row) + .map(err => err.row); + + if (!rows.length) { + break; + } + } + } + + throw error!; + } + + /** + * This method does the bulk of the work for processing options and making the + * network request. + * + * @private + * + * @param {RowMetadata|RowMetadata[]} rows The rows to insert. + * @param {InsertRowsOptions} options Insert options. 
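A hedged sketch of the caller-facing behavior this insert pipeline produces: supplying `schema` enables the retry and auto-create path described above, and row-level rejections are surfaced by _insert (below) as a PartialFailureError whose `errors` entries carry the rejected row and its reasons. Table names and row values here are hypothetical.

import {BigQuery} from '@google-cloud/bigquery';

async function insertEvents(): Promise<void> {
  const bigquery = new BigQuery();
  const table = bigquery.dataset('my_dataset').table('events');

  const rows = [
    {name: 'Ada', age: 36},
    {name: 'Grace', age: 45},
  ];

  try {
    // `schema` enables the 404 -> create table -> retry path;
    // `partialRetries` bounds how many times partially failed rows are re-sent.
    await table.insert(rows, {
      schema: 'name:string, age:integer',
      partialRetries: 3,
    });
  } catch (err) {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const failures = (err as any).errors;
    if (Array.isArray(failures)) {
      // Each entry carries the rejected row and its per-row error reasons.
      for (const failure of failures) {
        console.log('row failed:', failure.row, failure.errors);
      }
    }
    throw err;
  }
}

insertEvents().catch(console.error);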
+ * @returns {Promise} + */ + private async _insert( + rows: RowMetadata | RowMetadata[], + options: InsertRowsOptions + ): Promise { + rows = arrify(rows) as RowMetadata[]; if (!rows.length) { throw new Error('You must provide at least 1 row to be inserted.'); } - const json = extend(true, {}, options, { - rows, - }); + const json = extend(true, {}, options, {rows}); if (!options.raw) { - json.rows = arrify(rows).map((row) => { - return { - insertId: uuid.v4(), - json: Table.encodeValue_(row), + json.rows = rows.map((row: RowMetadata) => { + const encoded: InsertRow = { + json: Table.encodeValue_(row)!, }; + + if (options.createInsertId !== false) { + encoded.insertId = uuid.v4(); + } + + return encoded; }); } + delete json.createInsertId; + delete json.partialRetries; delete json.raw; + delete json.schema; - const autoCreate = !!options.autoCreate; - let schema: string|{}; - - delete json.autoCreate; + const [resp] = await this.request({ + method: 'POST', + uri: '/insertAll', + json, + }); - if (autoCreate) { - if (!options.schema) { - throw new Error( - 'Schema must be provided in order to auto-create Table.'); + const partialFailures = (resp.insertErrors || []).map( + (insertError: GoogleErrorBody) => { + return { + errors: insertError.errors!.map(error => { + return { + message: error.message, + reason: error.reason, + }; + }), + // eslint-disable-next-line @typescript-eslint/no-explicit-any + row: rows[(insertError as any).index], + }; } + ); - schema = options.schema; - delete json.schema; + if (partialFailures.length > 0) { + throw new common.util.PartialFailureError({ + errors: partialFailures, + response: resp, + } as GoogleErrorBody); } - const createTableAndRetry = () => { - this.create( - { - schema, - }, - (err, table, resp) => { - if (err && err.code !== 409) { - callback!(err, resp); - return; - } - - setTimeout(() => { - this.insert(rows, options, callback!); - }, 60000); - }); - }; - - this.request( - { - method: 'POST', - uri: '/insertAll', - json, - }, - (err, resp) => { - if (err) { - if ((err as common.ApiError).code === 404 && autoCreate) { - setTimeout(createTableAndRetry, Math.random() * 60000); - } else { - callback!(err, resp); - } - return; - } - - const partialFailures = - (resp.insertErrors || []).map((insertError: GoogleErrorBody) => { - return { - errors: insertError.errors!.map(error => { - return { - message: error.message, - reason: error.reason, - }; - }), - // tslint:disable-next-line: no-any - row: rows[(insertError as any).index], - }; - }); - - if (partialFailures.length > 0) { - err = new common.util.PartialFailureError({ - errors: partialFailures, - response: resp, - } as GoogleErrorBody); - } - - callback!(err, resp); - }); + return resp; } - load(source: string|File, metadata?: JobLoadMetadata): - Promise; load( - source: string|File, metadata: JobLoadMetadata, - callback: JobMetadataCallback): void; - load(source: string|File, callback: JobMetadataCallback): void; + source: string | File, + metadata?: JobLoadMetadata + ): Promise; + load( + source: string | File, + metadata: JobLoadMetadata, + callback: JobMetadataCallback + ): void; + load(source: string | File, callback: JobMetadataCallback): void; /** * Load data from a local file or Storage {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}. + * https://googleapis.dev/nodejs/storage/latest/File.html File}. * * By loading data this way, you create a load job that will run your data * load asynchronously. 
If you would like instantaneous access to your data, @@ -1962,12 +2088,12 @@ class Table extends common.ServiceObject { * * @param {string|File} source The source file to load. A filepath as a string * or a {@link - * https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} + * https://googleapis.dev/nodejs/storage/latest/File.html File} * object. * @param {object} [metadata] Metadata to set with the load operation. The * metadata object should be in the format of the - * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs - * resource. + * [`configuration.load`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) + * property of a Jobs resource. * @param {string} [metadata.format] The format the data being loaded is in. * Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET". * @param {string} [metadata.jobId] Custom id for the underlying job. @@ -1993,7 +2119,8 @@ class Table extends common.ServiceObject { * * //- * // You may also pass in metadata in the format of a Jobs resource. See - * // (http://goo.gl/BVcXk4) for a full list of supported values. + * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) + * // for a full list of supported values. * //- * const metadata = { * encoding: 'ISO-8859-1', @@ -2027,13 +2154,14 @@ class Table extends common.ServiceObject { * }); */ load( - source: string|File, - metadataOrCallback?: JobLoadMetadata|JobMetadataCallback, - cb?: JobMetadataCallback): void|Promise { + source: string | File, + metadataOrCallback?: JobLoadMetadata | JobMetadataCallback, + cb?: JobMetadataCallback + ): void | Promise { const metadata = - typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; + typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; const callback = - typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; + typeof metadataOrCallback === 'function' ? metadataOrCallback : cb; this.createLoadJob(source as File, metadata, (err, job, resp) => { if (err) { @@ -2041,7 +2169,7 @@ class Table extends common.ServiceObject { return; } - job!.on('error', callback!).on('complete', (metadata) => { + job!.on('error', callback!).on('complete', metadata => { callback!(null, metadata); }); }); @@ -2057,16 +2185,20 @@ class Table extends common.ServiceObject { * @param {function} [callback] See {@link BigQuery#query} for full documentation of this method. * @returns {Promise} */ - query(query: Query, callback?: SimpleQueryRowsCallback): - void|Promise { + query( + query: Query, + callback?: SimpleQueryRowsCallback + ): void | Promise { this.dataset.query(query, callback!); } - setMetadata(metadata: SetTableMetadataOptions): - Promise; setMetadata( - metadata: SetTableMetadataOptions, - callback: common.ResponseCallback): void; + metadata: SetTableMetadataOptions + ): Promise; + setMetadata( + metadata: SetTableMetadataOptions, + callback: common.ResponseCallback + ): void; /** * Set the metadata on the table. 
* @@ -2111,11 +2243,135 @@ class Table extends common.ServiceObject { * }); */ setMetadata( - metadata: SetTableMetadataOptions, callback?: common.ResponseCallback): - void|Promise { + metadata: SetTableMetadataOptions, + callback?: common.ResponseCallback + ): void | Promise { const body = Table.formatMetadata_(metadata as TableMetadata); super.setMetadata(body, callback!); } + + getIamPolicy( + optionsOrCallback?: GetPolicyOptions | PolicyCallback + ): Promise; + getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void; + /** + * Get the IAM access control policy for the table. + * @returns {Promise} + */ + getIamPolicy( + optionsOrCallback?: GetPolicyOptions, + cb?: PolicyCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + if ( + typeof options.requestedPolicyVersion === 'number' && + options.requestedPolicyVersion !== 1 + ) { + throw new Error('Only IAM policy version 1 is supported.'); + } + + const json = extend(true, {}, {options}); + + this.request( + { + method: 'POST', + uri: '/:getIamPolicy', + json, + }, + (err, resp) => { + if (err) { + callback!(err, null); + return; + } + callback!(null, resp); + } + ); + } + + setIamPolicy( + policy: Policy, + options?: SetPolicyOptions + ): Promise; + setIamPolicy( + policy: Policy, + options: SetPolicyOptions, + callback: PolicyCallback + ): void; + setIamPolicy(policy: Policy, callback: PolicyCallback): void; + /** + * Set the IAM access control policy on the table. + * @returns {Promise} + */ + setIamPolicy( + policy: Policy, + optionsOrCallback?: SetPolicyOptions | PolicyCallback, + cb?: PolicyCallback + ): void | Promise { + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + + if (policy.version && policy.version !== 1) { + throw new Error('Only IAM policy version 1 is supported.'); + } + + const json = extend(true, {}, options, {policy}); + + this.request( + { + method: 'POST', + uri: '/:setIamPolicy', + json, + }, + (err, resp) => { + if (err) { + callback!(err, null); + return; + } + callback!(null, resp); + } + ); + } + + testIamPermissions( + permissions: string | string[] + ): Promise; + testIamPermissions( + permissions: string | string[], + callback: PermissionsCallback + ): void; + /** + * Check the permissions that the caller has on the table. + * @returns {Promise} + */ + testIamPermissions( + permissions: string | string[], + callback?: PermissionsCallback + ): void | Promise { + permissions = arrify(permissions); + + const json = extend(true, {}, {permissions}); + + this.request( + { + method: 'POST', + uri: '/:testIamPermissions', + json, + }, + (err, resp) => { + if (err) { + callback!(err, null); + return; + } + callback!(null, resp); + } + ); + } } /*! Developer Documentation diff --git a/src/types.d.ts b/src/types.d.ts new file mode 100644 index 00000000..a59da1db --- /dev/null +++ b/src/types.d.ts @@ -0,0 +1,3705 @@ +/** + * BigQuery API + */ +declare namespace bigquery { + /** + * Aggregate metrics for classification/classifier models. For multi-class models, the metrics are either macro-averaged or micro-averaged. When macro-averaged, the metrics are calculated for each label and then an unweighted average is taken of those values. When micro-averaged, the metric is calculated globally by counting the total number of correctly predicted rows.
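For illustration, a sketch of calling the IAM helpers added above. Project, dataset, role, and member strings are hypothetical; only policy version 1 is accepted, per the checks in getIamPolicy and setIamPolicy, and the testIamPermissions response is assumed to echo the granted subset in `permissions`, as in the IAM API.

import {BigQuery} from '@google-cloud/bigquery';

async function manageTableIam(): Promise<void> {
  const bigquery = new BigQuery();
  const table = bigquery.dataset('my_dataset').table('my_table');

  const [policy] = await table.getIamPolicy({requestedPolicyVersion: 1});

  // Grant read access to a group, then write the policy back.
  policy.bindings = policy.bindings || [];
  policy.bindings.push({
    role: 'roles/bigquery.dataViewer',
    members: ['group:analysts@example.com'],
  });
  await table.setIamPolicy(policy);

  // Ask which of these permissions the current credentials hold.
  const [response] = await table.testIamPermissions([
    'bigquery.tables.get',
    'bigquery.tables.getData',
  ]);
  console.log(response.permissions);
}

manageTableIam().catch(console.error);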
+ */ + type IAggregateClassificationMetrics = { + /** + * Accuracy is the fraction of predictions given the correct label. For multiclass this is a micro-averaged metric. + */ + accuracy?: number; + /** + * The F1 score is an average of recall and precision. For multiclass this is a macro-averaged metric. + */ + f1Score?: number; + /** + * Logarithmic Loss. For multiclass this is a macro-averaged metric. + */ + logLoss?: number; + /** + * Precision is the fraction of actual positive predictions that had positive actual labels. For multiclass this is a macro-averaged metric treating each class as a binary classifier. + */ + precision?: number; + /** + * Recall is the fraction of actual positive labels that were given a positive prediction. For multiclass this is a macro-averaged metric. + */ + recall?: number; + /** + * Area Under a ROC Curve. For multiclass this is a macro-averaged metric. + */ + rocAuc?: number; + /** + * Threshold at which the metrics are computed. For binary classification models this is the positive class threshold. For multi-class classfication models this is the confidence threshold. + */ + threshold?: number; + }; + + /** + * Input/output argument of a function or a stored procedure. + */ + type IArgument = { + /** + * Optional. Defaults to FIXED_TYPE. + */ + argumentKind?: 'ARGUMENT_KIND_UNSPECIFIED' | 'FIXED_TYPE' | 'ANY_TYPE'; + /** + * Required unless argument_kind = ANY_TYPE. + */ + dataType?: IStandardSqlDataType; + /** + * Optional. Specifies whether the argument is input or output. Can be set for procedures only. + */ + mode?: 'MODE_UNSPECIFIED' | 'IN' | 'OUT' | 'INOUT'; + /** + * Optional. The name of this argument. Can be absent for function return argument. + */ + name?: string; + }; + + /** + * Arima coefficients. + */ + type IArimaCoefficients = { + /** + * Auto-regressive coefficients, an array of double. + */ + autoRegressiveCoefficients?: Array; + /** + * Intercept coefficient, just a double not an array. + */ + interceptCoefficient?: number; + /** + * Moving-average coefficients, an array of double. + */ + movingAverageCoefficients?: Array; + }; + + /** + * ARIMA model fitting metrics. + */ + type IArimaFittingMetrics = { + /** + * AIC. + */ + aic?: number; + /** + * Log-likelihood. + */ + logLikelihood?: number; + /** + * Variance. + */ + variance?: number; + }; + + /** + * Model evaluation metrics for ARIMA forecasting models. + */ + type IArimaForecastingMetrics = { + /** + * Arima model fitting metrics. + */ + arimaFittingMetrics?: Array; + /** + * Repeated as there can be many metric sets (one for each model) in auto-arima and the large-scale case. + */ + arimaSingleModelForecastingMetrics?: Array< + IArimaSingleModelForecastingMetrics + >; + /** + * Whether Arima model fitted with drift or not. It is always false when d is not 1. + */ + hasDrift?: Array; + /** + * Non-seasonal order. + */ + nonSeasonalOrder?: Array; + /** + * Seasonal periods. Repeated because multiple periods are supported for one time series. + */ + seasonalPeriods?: Array< + | 'SEASONAL_PERIOD_TYPE_UNSPECIFIED' + | 'NO_SEASONALITY' + | 'DAILY' + | 'WEEKLY' + | 'MONTHLY' + | 'QUARTERLY' + | 'YEARLY' + >; + /** + * Id to differentiate different time series for the large-scale case. + */ + timeSeriesId?: Array; + }; + + /** + * Arima model information. + */ + type IArimaModelInfo = { + /** + * Arima coefficients. + */ + arimaCoefficients?: IArimaCoefficients; + /** + * Arima fitting metrics. 
+ */ + arimaFittingMetrics?: IArimaFittingMetrics; + /** + * Whether Arima model fitted with drift or not. It is always false when d is not 1. + */ + hasDrift?: boolean; + /** + * Non-seasonal order. + */ + nonSeasonalOrder?: IArimaOrder; + /** + * Seasonal periods. Repeated because multiple periods are supported for one time series. + */ + seasonalPeriods?: Array< + | 'SEASONAL_PERIOD_TYPE_UNSPECIFIED' + | 'NO_SEASONALITY' + | 'DAILY' + | 'WEEKLY' + | 'MONTHLY' + | 'QUARTERLY' + | 'YEARLY' + >; + /** + * The id to indicate different time series. + */ + timeSeriesId?: string; + }; + + /** + * Arima order, can be used for both non-seasonal and seasonal parts. + */ + type IArimaOrder = { + /** + * Order of the differencing part. + */ + d?: string; + /** + * Order of the autoregressive part. + */ + p?: string; + /** + * Order of the moving-average part. + */ + q?: string; + }; + + /** + * (Auto-)arima fitting result. Wrap everything in ArimaResult for easier refactoring if we want to use model-specific iteration results. + */ + type IArimaResult = { + /** + * This message is repeated because there are multiple arima models fitted in auto-arima. For non-auto-arima model, its size is one. + */ + arimaModelInfo?: Array; + /** + * Seasonal periods. Repeated because multiple periods are supported for one time series. + */ + seasonalPeriods?: Array< + | 'SEASONAL_PERIOD_TYPE_UNSPECIFIED' + | 'NO_SEASONALITY' + | 'DAILY' + | 'WEEKLY' + | 'MONTHLY' + | 'QUARTERLY' + | 'YEARLY' + >; + }; + + /** + * Model evaluation metrics for a single ARIMA forecasting model. + */ + type IArimaSingleModelForecastingMetrics = { + /** + * Arima fitting metrics. + */ + arimaFittingMetrics?: IArimaFittingMetrics; + /** + * Is arima model fitted with drift or not. It is always false when d is not 1. + */ + hasDrift?: boolean; + /** + * Non-seasonal order. + */ + nonSeasonalOrder?: IArimaOrder; + /** + * Seasonal periods. Repeated because multiple periods are supported for one time series. + */ + seasonalPeriods?: Array< + | 'SEASONAL_PERIOD_TYPE_UNSPECIFIED' + | 'NO_SEASONALITY' + | 'DAILY' + | 'WEEKLY' + | 'MONTHLY' + | 'QUARTERLY' + | 'YEARLY' + >; + /** + * The id to indicate different time series. + */ + timeSeriesId?: string; + }; + + /** + * Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging. + */ + type IAuditConfig = { + /** + * The configuration for logging of each type of permission. 
+ */ + auditLogConfigs?: Array; + /** + * Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services. + */ + service?: string; + }; + + /** + * Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging. + */ + type IAuditLogConfig = { + /** + * Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members. + */ + exemptedMembers?: Array; + /** + * The log type that this config enables. + */ + logType?: + | 'LOG_TYPE_UNSPECIFIED' + | 'ADMIN_READ' + | 'DATA_WRITE' + | 'DATA_READ'; + }; + + type IBigQueryModelTraining = { + /** + * [Output-only, Beta] Index of current ML training iteration. Updated during create model query job to show job progress. + */ + currentIteration?: number; + /** + * [Output-only, Beta] Expected number of iterations for the create model query job specified as num_iterations in the input query. The actual total number of iterations may be less than this number due to early stop. + */ + expectedTotalIterations?: string; + }; + + type IBigtableColumn = { + /** + * [Optional] The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels. + */ + encoding?: string; + /** + * [Optional] If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries. + */ + fieldName?: string; + /** + * [Optional] If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels. + */ + onlyReadLatest?: boolean; + /** + * [Required] Qualifier of the column. Columns in the parent column family that has this exact qualifier are exposed as . field. If the qualifier is valid UTF-8 string, it can be specified in the qualifier_string field. Otherwise, a base-64 encoded value must be set to qualifier_encoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as field_name. + */ + qualifierEncoded?: string; + qualifierString?: string; + /** + * [Optional] The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is BYTES. 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels. 
+ */ + type?: string; + }; + + type IBigtableColumnFamily = { + /** + * [Optional] Lists of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as .. Other columns can be accessed as a list through .Column field. + */ + columns?: Array; + /** + * [Optional] The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it. + */ + encoding?: string; + /** + * Identifier of the column family. + */ + familyId?: string; + /** + * [Optional] If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. + */ + onlyReadLatest?: boolean; + /** + * [Optional] The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it. + */ + type?: string; + }; + + type IBigtableOptions = { + /** + * [Optional] List of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable. + */ + columnFamilies?: Array; + /** + * [Optional] If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false. + */ + ignoreUnspecifiedColumnFamilies?: boolean; + /** + * [Optional] If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false. + */ + readRowkeyAsString?: boolean; + }; + + /** + * Evaluation metrics for binary classification/classifier models. + */ + type IBinaryClassificationMetrics = { + /** + * Aggregate classification metrics. + */ + aggregateClassificationMetrics?: IAggregateClassificationMetrics; + /** + * Binary confusion matrix at multiple thresholds. + */ + binaryConfusionMatrixList?: Array; + /** + * Label representing the negative class. + */ + negativeLabel?: string; + /** + * Label representing the positive class. + */ + positiveLabel?: string; + }; + + /** + * Confusion matrix for binary classification models. + */ + type IBinaryConfusionMatrix = { + /** + * The fraction of predictions given the correct label. + */ + accuracy?: number; + /** + * The equally weighted average of recall and precision. + */ + f1Score?: number; + /** + * Number of false samples predicted as false. 
+ */ + falseNegatives?: string; + /** + * Number of false samples predicted as true. + */ + falsePositives?: string; + /** + * Threshold value used when computing each of the following metric. + */ + positiveClassThreshold?: number; + /** + * The fraction of actual positive predictions that had positive actual labels. + */ + precision?: number; + /** + * The fraction of actual positive labels that were given a positive prediction. + */ + recall?: number; + /** + * Number of true samples predicted as false. + */ + trueNegatives?: string; + /** + * Number of true samples predicted as true. + */ + truePositives?: string; + }; + + /** + * Associates `members` with a `role`. + */ + type IBinding = { + /** + * The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the members in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + */ + condition?: IExpr; + /** + * Specifies the identities requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`. + */ + members?: Array; + /** + * Role that is assigned to `members`. For example, `roles/viewer`, `roles/editor`, or `roles/owner`. + */ + role?: string; + }; + + type IBqmlIterationResult = { + /** + * [Output-only, Beta] Time taken to run the training iteration in milliseconds. 
+ */ + durationMs?: string; + /** + * [Output-only, Beta] Eval loss computed on the eval data at the end of the iteration. The eval loss is used for early stopping to avoid overfitting. No eval loss if eval_split_method option is specified as no_split or auto_split with input data size less than 500 rows. + */ + evalLoss?: number; + /** + * [Output-only, Beta] Index of the ML training iteration, starting from zero for each training run. + */ + index?: number; + /** + * [Output-only, Beta] Learning rate used for this iteration, it varies for different training iterations if learn_rate_strategy option is not constant. + */ + learnRate?: number; + /** + * [Output-only, Beta] Training loss computed on the training data at the end of the iteration. The training loss function is defined by model type. + */ + trainingLoss?: number; + }; + + type IBqmlTrainingRun = { + /** + * [Output-only, Beta] List of each iteration results. + */ + iterationResults?: Array; + /** + * [Output-only, Beta] Training run start time in milliseconds since the epoch. + */ + startTime?: string; + /** + * [Output-only, Beta] Different state applicable for a training run. IN PROGRESS: Training run is in progress. FAILED: Training run ended due to a non-retryable failure. SUCCEEDED: Training run successfully completed. CANCELLED: Training run cancelled by the user. + */ + state?: string; + /** + * [Output-only, Beta] Training options used by this training run. These options are mutable for subsequent training runs. Default values are explicitly stored for options not specified in the input query of the first training run. For subsequent training runs, any option not explicitly specified in the input query will be copied from the previous training run. + */ + trainingOptions?: { + earlyStop?: boolean; + l1Reg?: number; + l2Reg?: number; + learnRate?: number; + learnRateStrategy?: string; + lineSearchInitLearnRate?: number; + maxIteration?: string; + minRelProgress?: number; + warmStart?: boolean; + }; + }; + + /** + * Representative value of a categorical feature. + */ + type ICategoricalValue = { + /** + * Counts of all categories for the categorical feature. If there are more than ten categories, we return top ten (by count) and return one more CategoryCount with category "_OTHER_" and count as aggregate counts of remaining categories. + */ + categoryCounts?: Array; + }; + + /** + * Represents the count of a single category within the cluster. + */ + type ICategoryCount = { + /** + * The name of category. + */ + category?: string; + /** + * The count of training samples matching the category within the cluster. + */ + count?: string; + }; + + /** + * Message containing the information about one cluster. + */ + type ICluster = { + /** + * Centroid id. + */ + centroidId?: string; + /** + * Count of training data rows that were assigned to this cluster. + */ + count?: string; + /** + * Values of highly variant features for this cluster. + */ + featureValues?: Array; + }; + + /** + * Information about a single cluster for clustering model. + */ + type IClusterInfo = { + /** + * Centroid id. + */ + centroidId?: string; + /** + * Cluster radius, the average distance from centroid to each point assigned to the cluster. + */ + clusterRadius?: number; + /** + * Cluster size, the total number of points assigned to the cluster. + */ + clusterSize?: string; + }; + + type IClustering = { + /** + * [Repeated] One or more fields on which data should be clustered. Only top-level, non-repeated, simple-type fields are supported. 
When you cluster a table using multiple columns, the order of columns you specify is important. The order of the specified columns determines the sort order of the data. + */ + fields?: Array; + }; + + /** + * Evaluation metrics for clustering models. + */ + type IClusteringMetrics = { + /** + * [Beta] Information for all clusters. + */ + clusters?: Array; + /** + * Davies-Bouldin index. + */ + daviesBouldinIndex?: number; + /** + * Mean of squared distances between each sample to its cluster centroid. + */ + meanSquaredDistance?: number; + }; + + /** + * Confusion matrix for multi-class classification models. + */ + type IConfusionMatrix = { + /** + * Confidence threshold used when computing the entries of the confusion matrix. + */ + confidenceThreshold?: number; + /** + * One row per actual label. + */ + rows?: Array; + }; + + type IConnectionProperty = { + /** + * [Required] Name of the connection property to set. + */ + key?: string; + /** + * [Required] Value of the connection property. + */ + value?: string; + }; + + type ICsvOptions = { + /** + * [Optional] Indicates if BigQuery should accept rows that are missing trailing optional columns. If true, BigQuery treats missing trailing columns as null values. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. + */ + allowJaggedRows?: boolean; + /** + * [Optional] Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false. + */ + allowQuotedNewlines?: boolean; + /** + * [Optional] The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split using the values of the quote and fieldDelimiter properties. + */ + encoding?: string; + /** + * [Optional] The separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. The default value is a comma (','). + */ + fieldDelimiter?: string; + /** + * [Optional] The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true. + */ + quote?: string; + /** + * [Optional] The number of rows at the top of a CSV file that BigQuery will skip when reading the data. The default value is 0. This property is useful if you have header rows in the file that should be skipped. When autodetect is on, the behavior is the following: * skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected, the row is read as data. Otherwise data is read starting from the second row. * skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected, row N is just skipped. 
Otherwise row N is used to extract column names for the detected schema. + */ + skipLeadingRows?: string; + }; + + /** + * Data split result. This contains references to the training and evaluation data tables that were used to train the model. + */ + type IDataSplitResult = { + /** + * Table reference of the evaluation data after split. + */ + evaluationTable?: ITableReference; + /** + * Table reference of the training data after split. + */ + trainingTable?: ITableReference; + }; + + type IDataset = { + /** + * [Optional] An array of objects that define dataset access for one or more entities. You can set this property when inserting or updating a dataset in order to control who is allowed to access the data. If unspecified at dataset creation time, BigQuery adds default dataset access for the following entities: access.specialGroup: projectReaders; access.role: READER; access.specialGroup: projectWriters; access.role: WRITER; access.specialGroup: projectOwners; access.role: OWNER; access.userByEmail: [dataset creator email]; access.role: OWNER; + */ + access?: Array<{ + /** + * [Pick one] A domain to grant access to. Any users signed in with the domain specified will be granted the specified access. Example: "example.com". Maps to IAM policy member "domain:DOMAIN". + */ + domain?: string; + /** + * [Pick one] An email address of a Google Group to grant access to. Maps to IAM policy member "group:GROUP". + */ + groupByEmail?: string; + /** + * [Pick one] Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. + */ + iamMember?: string; + /** + * [Required] An IAM role ID that should be granted to the user, group, or domain specified in this access entry. The following legacy mappings will be applied: OWNER roles/bigquery.dataOwner WRITER roles/bigquery.dataEditor READER roles/bigquery.dataViewer This field will accept any of the above formats, but will return only the legacy format. For example, if you set this field to "roles/bigquery.dataOwner", it will be returned back as "OWNER". + */ + role?: string; + /** + * [Pick one] A routine from a different dataset to grant access to. Queries executed against that routine will have read access to views/tables/routines in this dataset. Only UDF is supported for now. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. + */ + routine?: IRoutineReference; + /** + * [Pick one] A special group to grant access to. Possible values include: projectOwners: Owners of the enclosing project. projectReaders: Readers of the enclosing project. projectWriters: Writers of the enclosing project. allAuthenticatedUsers: All authenticated BigQuery users. Maps to similarly-named IAM members. + */ + specialGroup?: string; + /** + * [Pick one] An email address of a user to grant access to. For example: fred@example.com. Maps to IAM policy member "user:EMAIL" or "serviceAccount:EMAIL". + */ + userByEmail?: string; + /** + * [Pick one] A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. + */ + view?: ITableReference; + }>; + /** + * [Output-only] The time when this dataset was created, in milliseconds since the epoch. 
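As a hedged illustration of the IDataset shape declared above: all identifiers and values are hypothetical, and the ambient `bigquery` namespace from this types.d.ts is assumed to be in scope.

const dataset: bigquery.IDataset = {
  datasetReference: {projectId: 'my-project', datasetId: 'analytics'},
  friendlyName: 'Analytics',
  description: 'Curated analytics tables',
  location: 'US',
  // 90 days, expressed in milliseconds as a string per the declaration above.
  defaultTableExpirationMs: '7776000000',
  labels: {team: 'data-platform'},
  access: [
    {role: 'READER', groupByEmail: 'analysts@example.com'},
    {role: 'OWNER', userByEmail: 'owner@example.com'},
  ],
};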
+ */ + creationTime?: string; + /** + * [Required] A reference that identifies the dataset. + */ + datasetReference?: IDatasetReference; + defaultEncryptionConfiguration?: IEncryptionConfiguration; + /** + * [Optional] The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property. + */ + defaultPartitionExpirationMs?: string; + /** + * [Optional] The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property. + */ + defaultTableExpirationMs?: string; + /** + * [Optional] A user-friendly description of the dataset. + */ + description?: string; + /** + * [Output-only] A hash of the resource. + */ + etag?: string; + /** + * [Optional] A descriptive name for the dataset. + */ + friendlyName?: string; + /** + * [Output-only] The fully-qualified unique name of the dataset in the format projectId:datasetId. The dataset name without the project name is given in the datasetId field. When creating a new dataset, leave this field blank, and instead specify the datasetId field. + */ + id?: string; + /** + * [Output-only] The resource type. + */ + kind?: string; + /** + * The labels associated with this dataset. You can use these to organize and group your datasets. You can set this property when inserting or updating a dataset. See Creating and Updating Dataset Labels for more information. + */ + labels?: {[key: string]: string}; + /** + * [Output-only] The date when this dataset or any of its tables was last modified, in milliseconds since the epoch. + */ + lastModifiedTime?: string; + /** + * The geographic location where the dataset should reside. The default value is US. See details at https://cloud.google.com/bigquery/docs/locations. + */ + location?: string; + /** + * [Output-only] Reserved for future use. + */ + satisfiesPZS?: boolean; + /** + * [Output-only] A URL that can be used to access the resource again. You can use this URL in Get or Update requests to the resource. + */ + selfLink?: string; + }; + + type IDatasetList = { + /** + * An array of the dataset resources in the project. Each resource contains basic information. For full information about a particular dataset resource, use the Datasets: get method. 
This property is omitted when there are no datasets in the project. + */ + datasets?: Array<{ + /** + * The dataset reference. Use this property to access specific parts of the dataset's ID, such as project ID or dataset ID. + */ + datasetReference?: IDatasetReference; + /** + * A descriptive name for the dataset, if one exists. + */ + friendlyName?: string; + /** + * The fully-qualified, unique, opaque ID of the dataset. + */ + id?: string; + /** + * The resource type. This property always returns the value "bigquery#dataset". + */ + kind?: string; + /** + * The labels associated with this dataset. You can use these to organize and group your datasets. + */ + labels?: {[key: string]: string}; + /** + * The geographic location where the data resides. + */ + location?: string; + }>; + /** + * A hash value of the results page. You can use this property to determine if the page has changed since the last request. + */ + etag?: string; + /** + * The list type. This property always returns the value "bigquery#datasetList". + */ + kind?: string; + /** + * A token that can be used to request the next results page. This property is omitted on the final results page. + */ + nextPageToken?: string; + }; + + type IDatasetReference = { + /** + * [Required] A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. + */ + datasetId?: string; + /** + * [Optional] The ID of the project containing this dataset. + */ + projectId?: string; + }; + + type IDestinationTableProperties = { + /** + * [Optional] The description for the destination table. This will only be used if the destination table is newly created. If the table already exists and a value different than the current description is provided, the job will fail. + */ + description?: string; + /** + * [Optional] The friendly name for the destination table. This will only be used if the destination table is newly created. If the table already exists and a value different than the current friendly name is provided, the job will fail. + */ + friendlyName?: string; + /** + * [Optional] The labels associated with this table. You can use these to organize and group your tables. This will only be used if the destination table is newly created. If the table already exists and labels are different than the current labels are provided, the job will fail. + */ + labels?: {[key: string]: string}; + }; + + type IEncryptionConfiguration = { + /** + * [Optional] Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. + */ + kmsKeyName?: string; + }; + + /** + * A single entry in the confusion matrix. + */ + type IEntry = { + /** + * Number of items being predicted as this label. + */ + itemCount?: string; + /** + * The predicted label. For confidence_threshold > 0, we will also add an entry indicating the number of items under the confidence threshold. + */ + predictedLabel?: string; + }; + + type IErrorProto = { + /** + * Debugging information. This property is internal to Google and should not be used. + */ + debugInfo?: string; + /** + * Specifies where the error occurred, if present. + */ + location?: string; + /** + * A human-readable description of the error. + */ + message?: string; + /** + * A short error code that summarizes the error. 
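// A minimal sketch of how the IDataset shape above might be filled in when
// creating a dataset. It assumes the declarations in this file are in scope;
// the project, dataset, and e-mail values are placeholders.
const newDataset: IDataset = {
  datasetReference: {projectId: 'my-project', datasetId: 'analytics'},
  location: 'US',
  defaultTableExpirationMs: '7776000000', // 90 days, in milliseconds
  access: [
    {role: 'READER', groupByEmail: 'analysts@example.com'},
    {role: 'OWNER', userByEmail: 'admin@example.com'},
  ],
  labels: {env: 'prod'},
};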
+ */ + reason?: string; + }; + + /** + * Evaluation metrics of a model. These are either computed on all training data or just the eval data based on whether eval data was used during training. These are not present for imported models. + */ + type IEvaluationMetrics = { + /** + * Populated for ARIMA models. + */ + arimaForecastingMetrics?: IArimaForecastingMetrics; + /** + * Populated for binary classification/classifier models. + */ + binaryClassificationMetrics?: IBinaryClassificationMetrics; + /** + * Populated for clustering models. + */ + clusteringMetrics?: IClusteringMetrics; + /** + * Populated for multi-class classification/classifier models. + */ + multiClassClassificationMetrics?: IMultiClassClassificationMetrics; + /** + * Populated for implicit feedback type matrix factorization models. + */ + rankingMetrics?: IRankingMetrics; + /** + * Populated for regression models and explicit feedback type matrix factorization models. + */ + regressionMetrics?: IRegressionMetrics; + }; + + type IExplainQueryStage = { + /** + * Number of parallel input segments completed. + */ + completedParallelInputs?: string; + /** + * Milliseconds the average shard spent on CPU-bound tasks. + */ + computeMsAvg?: string; + /** + * Milliseconds the slowest shard spent on CPU-bound tasks. + */ + computeMsMax?: string; + /** + * Relative amount of time the average shard spent on CPU-bound tasks. + */ + computeRatioAvg?: number; + /** + * Relative amount of time the slowest shard spent on CPU-bound tasks. + */ + computeRatioMax?: number; + /** + * Stage end time represented as milliseconds since epoch. + */ + endMs?: string; + /** + * Unique ID for stage within plan. + */ + id?: string; + /** + * IDs for stages that are inputs to this stage. + */ + inputStages?: Array; + /** + * Human-readable name for stage. + */ + name?: string; + /** + * Number of parallel input segments to be processed. + */ + parallelInputs?: string; + /** + * Milliseconds the average shard spent reading input. + */ + readMsAvg?: string; + /** + * Milliseconds the slowest shard spent reading input. + */ + readMsMax?: string; + /** + * Relative amount of time the average shard spent reading input. + */ + readRatioAvg?: number; + /** + * Relative amount of time the slowest shard spent reading input. + */ + readRatioMax?: number; + /** + * Number of records read into the stage. + */ + recordsRead?: string; + /** + * Number of records written by the stage. + */ + recordsWritten?: string; + /** + * Total number of bytes written to shuffle. + */ + shuffleOutputBytes?: string; + /** + * Total number of bytes written to shuffle and spilled to disk. + */ + shuffleOutputBytesSpilled?: string; + /** + * Slot-milliseconds used by the stage. + */ + slotMs?: string; + /** + * Stage start time represented as milliseconds since epoch. + */ + startMs?: string; + /** + * Current status for the stage. + */ + status?: string; + /** + * List of operations within the stage in dependency order (approximately chronological). + */ + steps?: Array; + /** + * Milliseconds the average shard spent waiting to be scheduled. + */ + waitMsAvg?: string; + /** + * Milliseconds the slowest shard spent waiting to be scheduled. + */ + waitMsMax?: string; + /** + * Relative amount of time the average shard spent waiting to be scheduled. + */ + waitRatioAvg?: number; + /** + * Relative amount of time the slowest shard spent waiting to be scheduled. + */ + waitRatioMax?: number; + /** + * Milliseconds the average shard spent on writing output. 
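// Sketch of turning an IErrorProto (as surfaced in job statuses and query
// responses) into a single log line; assumes the declarations in this file
// are in scope.
function formatErrorProto(err: IErrorProto): string {
  const where = err.location ? ` at ${err.location}` : '';
  return `${err.reason ?? 'unknown'}${where}: ${err.message ?? ''}`;
}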
+ */ + writeMsAvg?: string; + /** + * Milliseconds the slowest shard spent on writing output. + */ + writeMsMax?: string; + /** + * Relative amount of time the average shard spent on writing output. + */ + writeRatioAvg?: number; + /** + * Relative amount of time the slowest shard spent on writing output. + */ + writeRatioMax?: number; + }; + + type IExplainQueryStep = { + /** + * Machine-readable operation type. + */ + kind?: string; + /** + * Human-readable stage descriptions. + */ + substeps?: Array; + }; + + /** + * Explanation for a single feature. + */ + type IExplanation = { + /** + * Attribution of feature. + */ + attribution?: number; + /** + * Full name of the feature. For non-numerical features, will be formatted like .. Overall size of feature name will always be truncated to first 120 characters. + */ + featureName?: string; + }; + + /** + * Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information. + */ + type IExpr = { + /** + * Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI. + */ + description?: string; + /** + * Textual representation of an expression in Common Expression Language syntax. + */ + expression?: string; + /** + * Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file. + */ + location?: string; + /** + * Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression. + */ + title?: string; + }; + + type IExternalDataConfiguration = { + /** + * Try to detect schema and format options automatically. Any option specified explicitly will be honored. + */ + autodetect?: boolean; + /** + * [Optional] Additional options if sourceFormat is set to BIGTABLE. + */ + bigtableOptions?: IBigtableOptions; + /** + * [Optional] The compression type of the data source. Possible values include GZIP and NONE. The default value is NONE. This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats. + */ + compression?: string; + /** + * [Optional, Trusted Tester] Connection for external data source. + */ + connectionId?: string; + /** + * Additional properties to set if sourceFormat is set to CSV. + */ + csvOptions?: ICsvOptions; + /** + * [Optional] Additional options if sourceFormat is set to GOOGLE_SHEETS. 
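// Sketch of an IExpr, reusing the "Summary size limit" CEL example from the
// comment above; assumes the declarations in this file are in scope.
const summaryLimit: IExpr = {
  title: 'Summary size limit',
  description: 'Determines if a summary is less than 100 chars',
  expression: 'document.summary.size() < 100',
};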
+ */ + googleSheetsOptions?: IGoogleSheetsOptions; + /** + * [Optional, Trusted Tester] Options to configure hive partitioning support. + */ + hivePartitioningOptions?: IHivePartitioningOptions; + /** + * [Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values that don't match any column names Google Cloud Bigtable: This setting is ignored. Google Cloud Datastore backups: This setting is ignored. Avro: This setting is ignored. + */ + ignoreUnknownValues?: boolean; + /** + * [Optional] The maximum number of bad records that BigQuery can ignore when reading data. If the number of bad records exceeds this value, an invalid error is returned in the job result. This is only valid for CSV, JSON, and Google Sheets. The default value is 0, which requires that all records are valid. This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats. + */ + maxBadRecords?: number; + /** + * [Optional] The schema for the data. Schema is required for CSV and JSON formats. Schema is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats. + */ + schema?: ITableSchema; + /** + * [Required] The data format. For CSV files, specify "CSV". For Google sheets, specify "GOOGLE_SHEETS". For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro files, specify "AVRO". For Google Cloud Datastore backups, specify "DATASTORE_BACKUP". [Beta] For Google Cloud Bigtable, specify "BIGTABLE". + */ + sourceFormat?: string; + /** + * [Required] The fully-qualified URIs that point to your data in Google Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character and it must come after the 'bucket' name. Size limits related to load jobs apply to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be specified and it has be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. For Google Cloud Datastore backups, exactly one URI can be specified. Also, the '*' wildcard character is not allowed. + */ + sourceUris?: Array; + }; + + /** + * Representative value of a single feature within the cluster. + */ + type IFeatureValue = { + /** + * The categorical feature value. + */ + categoricalValue?: ICategoricalValue; + /** + * The feature column name. + */ + featureColumn?: string; + /** + * The numerical feature value. This is the centroid value for this feature. + */ + numericalValue?: number; + }; + + /** + * Request message for `GetIamPolicy` method. + */ + type IGetIamPolicyRequest = { + /** + * OPTIONAL: A `GetPolicyOptions` object for specifying options to `GetIamPolicy`. + */ + options?: IGetPolicyOptions; + }; + + /** + * Encapsulates settings provided to GetIamPolicy. + */ + type IGetPolicyOptions = { + /** + * Optional. The policy format version to be returned. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional bindings must specify version 3. Policies without any conditional bindings may specify any valid value or leave the field unset. 
To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + */ + requestedPolicyVersion?: number; + }; + + type IGetQueryResultsResponse = { + /** + * Whether the query result was fetched from the query cache. + */ + cacheHit?: boolean; + /** + * [Output-only] The first errors or warnings encountered during the running of the job. The final message includes the number of errors that caused the process to stop. Errors here do not necessarily mean that the job has completed or was unsuccessful. + */ + errors?: Array; + /** + * A hash of this response. + */ + etag?: string; + /** + * Whether the query has completed or not. If rows or totalRows are present, this will always be true. If this is false, totalRows will not be available. + */ + jobComplete?: boolean; + /** + * Reference to the BigQuery Job that was created to run the query. This field will be present even if the original request timed out, in which case GetQueryResults can be used to read the results once the query has completed. Since this API only returns the first page of results, subsequent pages can be fetched via the same mechanism (GetQueryResults). + */ + jobReference?: IJobReference; + /** + * The resource type of the response. + */ + kind?: string; + /** + * [Output-only] The number of rows affected by a DML statement. Present only for DML statements INSERT, UPDATE or DELETE. + */ + numDmlAffectedRows?: string; + /** + * A token used for paging results. + */ + pageToken?: string; + /** + * An object with as many results as can be contained within the maximum permitted reply size. To get any additional rows, you can call GetQueryResults and specify the jobReference returned above. Present only when the query completes successfully. + */ + rows?: Array; + /** + * The schema of the results. Present only when the query completes successfully. + */ + schema?: ITableSchema; + /** + * The total number of bytes processed for this query. + */ + totalBytesProcessed?: string; + /** + * The total number of rows in the complete query result set, which can be more than the number of rows in this single page of results. Present only when the query completes successfully. + */ + totalRows?: string; + }; + + type IGetServiceAccountResponse = { + /** + * The service account email address. + */ + email?: string; + /** + * The resource type of the response. + */ + kind?: string; + }; + + /** + * Global explanations containing the top most important features after training. + */ + type IGlobalExplanation = { + /** + * Class label for this set of global explanations. Will be empty/null for binary logistic and linear regression models. Sorted alphabetically in descending order. + */ + classLabel?: string; + /** + * A list of the top global explanations. Sorted by absolute value of attribution in descending order. + */ + explanations?: Array; + }; + + type IGoogleSheetsOptions = { + /** + * [Optional] Range of a sheet to query from. Only used when non-empty. Typical format: sheet_name!top_left_cell_id:bottom_right_cell_id For example: sheet1!A1:B20 + */ + range?: string; + /** + * [Optional] The number of rows at the top of a sheet that BigQuery will skip when reading the data. The default value is 0. This property is useful if you have header rows that should be skipped. When autodetect is on, behavior is the following: * skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. 
If they are not detected, the row is read as data. Otherwise data is read starting from the second row. * skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected, row N is just skipped. Otherwise row N is used to extract column names for the detected schema. + */ + skipLeadingRows?: string; + }; + + type IHivePartitioningOptions = { + /** + * [Optional] When set, what mode of hive partitioning to use when reading data. The following modes are supported. (1) AUTO: automatically infer partition key name(s) and type(s). (2) STRINGS: automatically infer partition key name(s). All types are interpreted as strings. (3) CUSTOM: partition key schema is encoded in the source URI prefix. Not all storage formats support hive partitioning. Requesting hive partitioning on an unsupported format will lead to an error. Currently supported types include: AVRO, CSV, JSON, ORC and Parquet. + */ + mode?: string; + /** + * [Optional] If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified. Note that this field should only be true when creating a permanent external table or querying a temporary external table. Hive-partitioned loads with requirePartitionFilter explicitly set to true will fail. + */ + requirePartitionFilter?: boolean; + /** + * [Optional] When hive partition detection is requested, a common prefix for all source uris should be supplied. The prefix must end immediately before the partition key encoding begins. For example, consider files following this data layout. gs://bucket/path_to_table/dt=2019-01-01/country=BR/id=7/file.avro gs://bucket/path_to_table/dt=2018-12-31/country=CA/id=3/file.avro When hive partitioning is requested with either AUTO or STRINGS detection, the common prefix can be either of gs://bucket/path_to_table or gs://bucket/path_to_table/ (trailing slash does not matter). + */ + sourceUriPrefix?: string; + }; + + /** + * Information about a single iteration of the training run. + */ + type IIterationResult = { + arimaResult?: IArimaResult; + /** + * Information about top clusters for clustering models. + */ + clusterInfos?: Array; + /** + * Time taken to run the iteration in milliseconds. + */ + durationMs?: string; + /** + * Loss computed on the eval data at the end of iteration. + */ + evalLoss?: number; + /** + * Index of the iteration, 0 based. + */ + index?: number; + /** + * Learn rate used for this iteration. + */ + learnRate?: number; + /** + * Loss computed on the training data at the end of iteration. + */ + trainingLoss?: number; + }; + + type IJob = { + /** + * [Required] Describes the job configuration. + */ + configuration?: IJobConfiguration; + /** + * [Output-only] A hash of this resource. + */ + etag?: string; + /** + * [Output-only] Opaque ID field of the job + */ + id?: string; + /** + * [Optional] Reference describing the unique-per-user name of the job. + */ + jobReference?: IJobReference; + /** + * [Output-only] The type of the resource. + */ + kind?: string; + /** + * [Output-only] A URL that can be used to access this resource again. + */ + selfLink?: string; + /** + * [Output-only] Information about the job, including starting time and ending time of the job. + */ + statistics?: IJobStatistics; + /** + * [Output-only] The status of this job. 
Examine this value when polling an asynchronous job to see if the job is complete. + */ + status?: IJobStatus; + /** + * [Output-only] Email address of the user who ran the job. + */ + user_email?: string; + }; + + type IJobCancelResponse = { + /** + * The final state of the job. + */ + job?: IJob; + /** + * The resource type of the response. + */ + kind?: string; + }; + + type IJobConfiguration = { + /** + * [Pick one] Copies a table. + */ + copy?: IJobConfigurationTableCopy; + /** + * [Optional] If set, don't actually run this job. A valid query will return a mostly empty response with some processing statistics, while an invalid query will return the same error it would if it wasn't a dry run. Behavior of non-query jobs is undefined. + */ + dryRun?: boolean; + /** + * [Pick one] Configures an extract job. + */ + extract?: IJobConfigurationExtract; + /** + * [Optional] Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job. + */ + jobTimeoutMs?: string; + /** + * [Output-only] The type of the job. Can be QUERY, LOAD, EXTRACT, COPY or UNKNOWN. + */ + jobType?: string; + /** + * The labels associated with this job. You can use these to organize and group your jobs. Label keys and values can be no longer than 63 characters, can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and each label in the list must have a different key. + */ + labels?: {[key: string]: string}; + /** + * [Pick one] Configures a load job. + */ + load?: IJobConfigurationLoad; + /** + * [Pick one] Configures a query job. + */ + query?: IJobConfigurationQuery; + }; + + type IJobConfigurationExtract = { + /** + * [Optional] The compression type to use for exported files. Possible values include GZIP, DEFLATE, SNAPPY, and NONE. The default value is NONE. DEFLATE and SNAPPY are only supported for Avro. Not applicable when extracting models. + */ + compression?: string; + /** + * [Optional] The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON, PARQUET or AVRO for tables and ML_TF_SAVED_MODEL or ML_XGBOOST_BOOSTER for models. The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV. The default value for models is ML_TF_SAVED_MODEL. + */ + destinationFormat?: string; + /** + * [Pick one] DEPRECATED: Use destinationUris instead, passing only one URI as necessary. The fully-qualified Google Cloud Storage URI where the extracted table should be written. + */ + destinationUri?: string; + /** + * [Pick one] A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written. + */ + destinationUris?: Array; + /** + * [Optional] Delimiter to use between fields in the exported data. Default is ','. Not applicable when extracting models. + */ + fieldDelimiter?: string; + /** + * [Optional] Whether to print out a header row in the results. Default is true. Not applicable when extracting models. + */ + printHeader?: boolean; + /** + * A reference to the model being exported. + */ + sourceModel?: IModelReference; + /** + * A reference to the table being exported. + */ + sourceTable?: ITableReference; + /** + * [Optional] If destinationFormat is set to "AVRO", this flag indicates whether to enable extracting applicable column types (such as TIMESTAMP) to their corresponding AVRO logical types (timestamp-micros), instead of only using their raw types (avro-long). 
Not applicable when extracting models. + */ + useAvroLogicalTypes?: boolean; + }; + + type IJobConfigurationLoad = { + /** + * [Optional] Accept rows that are missing trailing optional columns. The missing values are treated as nulls. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats. + */ + allowJaggedRows?: boolean; + /** + * Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false. + */ + allowQuotedNewlines?: boolean; + /** + * [Optional] Indicates if we should automatically infer the options and schema for CSV and JSON sources. + */ + autodetect?: boolean; + /** + * [Beta] Clustering specification for the destination table. Must be specified with time-based partitioning, data in the table will be first partitioned and subsequently clustered. + */ + clustering?: IClustering; + /** + * [Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion. + */ + createDisposition?: string; + /** + * [Trusted Tester] Defines the list of possible SQL data types to which the source decimal values are converted. This list and the precision and the scale parameters of the decimal field determine the target type. In the order of NUMERIC, BIGNUMERIC, and STRING, a type is picked if it is in the specified list and if it supports the precision and the scale. STRING supports all precision and scale values. If none of the listed types supports the precision and the scale, the type supporting the widest range in the specified list is picked, and if a value exceeds the supported range when reading the data, an error will be thrown. For example: suppose decimal_target_type = ["NUMERIC", "BIGNUMERIC"]. Then if (precision,scale) is: * (38,9) -> NUMERIC; * (39,9) -> BIGNUMERIC (NUMERIC cannot hold 30 integer digits); * (38,10) -> BIGNUMERIC (NUMERIC cannot hold 10 fractional digits); * (76,38) -> BIGNUMERIC; * (77,38) -> BIGNUMERIC (error if value exeeds supported range). For duplicated types in this field, only one will be considered and the rest will be ignored. The order of the types in this field is ignored. For example, ["BIGNUMERIC", "NUMERIC"] is the same as ["NUMERIC", "BIGNUMERIC"] and NUMERIC always takes precedence over BIGNUMERIC. + */ + decimalTargetTypes?: Array; + /** + * Custom encryption configuration (e.g., Cloud KMS keys). + */ + destinationEncryptionConfiguration?: IEncryptionConfiguration; + /** + * [Required] The destination table to load the data into. + */ + destinationTable?: ITableReference; + /** + * [Beta] [Optional] Properties with which to create the destination table if it is new. + */ + destinationTableProperties?: IDestinationTableProperties; + /** + * [Optional] The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split using the values of the quote and fieldDelimiter properties. 
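// Sketch of an IJobConfigurationExtract that exports a table to Cloud
// Storage as gzipped CSV. Assumes the declarations in this file (including
// ITableReference) are in scope; project, dataset, table, and bucket names
// are placeholders.
const extractConfig: IJobConfigurationExtract = {
  sourceTable: {projectId: 'my-project', datasetId: 'analytics', tableId: 'events'},
  destinationUris: ['gs://my-bucket/exports/events-*.csv.gz'],
  destinationFormat: 'CSV',
  compression: 'GZIP',
  printHeader: true,
};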
+ */ + encoding?: string; + /** + * [Optional] The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character. To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. The default value is a comma (','). + */ + fieldDelimiter?: string; + /** + * [Optional, Trusted Tester] Options to configure hive partitioning support. + */ + hivePartitioningOptions?: IHivePartitioningOptions; + /** + * [Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values that don't match any column names + */ + ignoreUnknownValues?: boolean; + /** + * [Optional] The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value, an invalid error is returned in the job result. This is only valid for CSV and JSON. The default value is 0, which requires that all records are valid. + */ + maxBadRecords?: number; + /** + * [Optional] Specifies a string that represents a null value in a CSV file. For example, if you specify "\N", BigQuery interprets "\N" as a null value when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as an empty value. + */ + nullMarker?: string; + /** + * If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result. + */ + projectionFields?: Array; + /** + * [Optional] The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true. + */ + quote?: string; + /** + * [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning and rangePartitioning should be specified. + */ + rangePartitioning?: IRangePartitioning; + /** + * [Optional] The schema for the destination table. The schema can be omitted if the destination table already exists, or if you're loading data from Google Cloud Datastore. + */ + schema?: ITableSchema; + /** + * [Deprecated] The inline schema. For CSV schemas, specify as "Field1:Type1[,Field2:Type2]*". For example, "foo:STRING, bar:INTEGER, baz:FLOAT". 
+ */ + schemaInline?: string; + /** + * [Deprecated] The format of the schemaInline property. + */ + schemaInlineFormat?: string; + /** + * Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or supplied in the job configuration. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. + */ + schemaUpdateOptions?: Array; + /** + * [Optional] The number of rows at the top of a CSV file that BigQuery will skip when loading the data. The default value is 0. This property is useful if you have header rows in the file that should be skipped. + */ + skipLeadingRows?: number; + /** + * [Optional] The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET". For orc, specify "ORC". The default value is CSV. + */ + sourceFormat?: string; + /** + * [Required] The fully-qualified URIs that point to your data in Google Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character and it must come after the 'bucket' name. Size limits related to load jobs apply to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be specified and it has be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '*' wildcard character is not allowed. + */ + sourceUris?: Array; + /** + * Time-based partitioning specification for the destination table. Only one of timePartitioning and rangePartitioning should be specified. + */ + timePartitioning?: ITimePartitioning; + /** + * [Optional] If sourceFormat is set to "AVRO", indicates whether to enable interpreting logical types into their corresponding types (ie. TIMESTAMP), instead of only using their raw types (ie. INTEGER). + */ + useAvroLogicalTypes?: boolean; + /** + * [Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_APPEND. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion. + */ + writeDisposition?: string; + }; + + type IJobConfigurationQuery = { + /** + * [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance. Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed. However, you must still set destinationTable when result size exceeds the allowed maximum response size. 
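// Sketch of an IJobConfigurationLoad that appends a CSV file from Cloud
// Storage into an existing table. Assumes the declarations in this file are
// in scope; identifiers and paths are placeholders.
const loadConfig: IJobConfigurationLoad = {
  destinationTable: {projectId: 'my-project', datasetId: 'analytics', tableId: 'events'},
  sourceUris: ['gs://my-bucket/incoming/events.csv'],
  sourceFormat: 'CSV',
  skipLeadingRows: 1, // one header row in each file
  fieldDelimiter: ',',
  createDisposition: 'CREATE_NEVER', // the table must already exist
  writeDisposition: 'WRITE_APPEND',
};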
+ */ + allowLargeResults?: boolean; + /** + * [Beta] Clustering specification for the destination table. Must be specified with time-based partitioning, data in the table will be first partitioned and subsequently clustered. + */ + clustering?: IClustering; + /** + * Connection properties. + */ + connectionProperties?: Array; + /** + * [Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion. + */ + createDisposition?: string; + /** + * [Optional] Specifies the default dataset to use for unqualified table names in the query. Note that this does not alter behavior of unqualified dataset names. + */ + defaultDataset?: IDatasetReference; + /** + * Custom encryption configuration (e.g., Cloud KMS keys). + */ + destinationEncryptionConfiguration?: IEncryptionConfiguration; + /** + * [Optional] Describes the table where the query results should be stored. If not present, a new table will be created to store the results. This property must be set for large results that exceed the maximum response size. + */ + destinationTable?: ITableReference; + /** + * [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields in the query results. allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened. + */ + flattenResults?: boolean; + /** + * [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge). If unspecified, this will be set to your project default. + */ + maximumBillingTier?: number; + /** + * [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge). If unspecified, this will be set to your project default. + */ + maximumBytesBilled?: string; + /** + * Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query. + */ + parameterMode?: string; + /** + * [Deprecated] This property is deprecated. + */ + preserveNulls?: boolean; + /** + * [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH. The default value is INTERACTIVE. + */ + priority?: string; + /** + * [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL. + */ + query?: string; + /** + * Query parameters for standard SQL queries. + */ + queryParameters?: Array; + /** + * [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning and rangePartitioning should be specified. + */ + rangePartitioning?: IRangePartitioning; + /** + * Allows the schema of the destination table to be updated as a side effect of the query job. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema. 
One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. + */ + schemaUpdateOptions?: Array; + /** + * [Optional] If querying an external data source outside of BigQuery, describes the data format, location and other properties of the data source. By defining these properties, the data source can then be queried as if it were a standard BigQuery table. + */ + tableDefinitions?: {[key: string]: IExternalDataConfiguration}; + /** + * Time-based partitioning specification for the destination table. Only one of timePartitioning and rangePartitioning should be specified. + */ + timePartitioning?: ITimePartitioning; + /** + * Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true. If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value of flattenResults is ignored; query will be run as if flattenResults is false. + */ + useLegacySql?: boolean; + /** + * [Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified. The default value is true. + */ + useQueryCache?: boolean; + /** + * Describes user-defined function resources used in the query. + */ + userDefinedFunctionResources?: Array; + /** + * [Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion. + */ + writeDisposition?: string; + }; + + type IJobConfigurationTableCopy = { + /** + * [Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion. + */ + createDisposition?: string; + /** + * Custom encryption configuration (e.g., Cloud KMS keys). + */ + destinationEncryptionConfiguration?: IEncryptionConfiguration; + /** + * [Optional] The time when the destination table expires. Expired tables will be deleted and their storage reclaimed. + */ + destinationExpirationTime?: any; + /** + * [Required] The destination table + */ + destinationTable?: ITableReference; + /** + * [Optional] Supported operation types in table copy job. + */ + operationType?: string; + /** + * [Pick one] Source table to copy. + */ + sourceTable?: ITableReference; + /** + * [Pick one] Source tables to copy. + */ + sourceTables?: Array; + /** + * [Optional] Specifies the action that occurs if the destination table already exists. 
The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion. + */ + writeDisposition?: string; + }; + + type IJobList = { + /** + * A hash of this page of results. + */ + etag?: string; + /** + * List of jobs that were requested. + */ + jobs?: Array<{ + /** + * [Full-projection-only] Specifies the job configuration. + */ + configuration?: IJobConfiguration; + /** + * A result object that will be present only if the job has failed. + */ + errorResult?: IErrorProto; + /** + * Unique opaque ID of the job. + */ + id?: string; + /** + * Job reference uniquely identifying the job. + */ + jobReference?: IJobReference; + /** + * The resource type. + */ + kind?: string; + /** + * Running state of the job. When the state is DONE, errorResult can be checked to determine whether the job succeeded or failed. + */ + state?: string; + /** + * [Output-only] Information about the job, including starting time and ending time of the job. + */ + statistics?: IJobStatistics; + /** + * [Full-projection-only] Describes the state of the job. + */ + status?: IJobStatus; + /** + * [Full-projection-only] Email address of the user who ran the job. + */ + user_email?: string; + }>; + /** + * The resource type of the response. + */ + kind?: string; + /** + * A token to request the next page of results. + */ + nextPageToken?: string; + }; + + type IJobReference = { + /** + * [Required] The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters. + */ + jobId?: string; + /** + * The geographic location of the job. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location. + */ + location?: string; + /** + * [Required] The ID of the project containing this job. + */ + projectId?: string; + }; + + type IJobStatistics = { + /** + * [TrustedTester] [Output-only] Job progress (0.0 -> 1.0) for LOAD and EXTRACT jobs. + */ + completionRatio?: number; + /** + * [Output-only] Creation time of this job, in milliseconds since the epoch. This field will be present on all jobs. + */ + creationTime?: string; + /** + * [Output-only] End time of this job, in milliseconds since the epoch. This field will be present whenever a job is in the DONE state. + */ + endTime?: string; + /** + * [Output-only] Statistics for an extract job. + */ + extract?: IJobStatistics4; + /** + * [Output-only] Statistics for a load job. + */ + load?: IJobStatistics3; + /** + * [Output-only] Number of child jobs executed. + */ + numChildJobs?: string; + /** + * [Output-only] If this is a child job, the id of the parent. + */ + parentJobId?: string; + /** + * [Output-only] Statistics for a query job. + */ + query?: IJobStatistics2; + /** + * [Output-only] Quotas which delayed this job's start time. + */ + quotaDeferments?: Array; + /** + * [Output-only] Job resource usage breakdown by reservation. + */ + reservationUsage?: Array<{ + /** + * [Output-only] Reservation name or "unreserved" for on-demand resources usage. 
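// Sketch of an IJobConfigurationTableCopy that copies one table into a new
// backup table. Assumes the declarations in this file are in scope;
// identifiers are placeholders.
const copyConfig: IJobConfigurationTableCopy = {
  sourceTable: {projectId: 'my-project', datasetId: 'analytics', tableId: 'events'},
  destinationTable: {projectId: 'my-project', datasetId: 'analytics', tableId: 'events_backup'},
  createDisposition: 'CREATE_IF_NEEDED',
  writeDisposition: 'WRITE_EMPTY', // fail rather than overwrite existing data
};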
+ */ + name?: string; + /** + * [Output-only] Slot-milliseconds the job spent in the given reservation. + */ + slotMs?: string; + }>; + /** + * [Output-only] Name of the primary reservation assigned to this job. Note that this could be different than reservations reported in the reservation usage field if parent reservations were used to execute this job. + */ + reservation_id?: string; + /** + * [Output-only] [Preview] Statistics for row-level security. Present only for query and extract jobs. + */ + rowLevelSecurityStatistics?: IRowLevelSecurityStatistics; + /** + * [Output-only] Statistics for a child job of a script. + */ + scriptStatistics?: IScriptStatistics; + /** + * [Output-only] Start time of this job, in milliseconds since the epoch. This field will be present when the job transitions from the PENDING state to either RUNNING or DONE. + */ + startTime?: string; + /** + * [Output-only] [Deprecated] Use the bytes processed in the query statistics instead. + */ + totalBytesProcessed?: string; + /** + * [Output-only] Slot-milliseconds for the job. + */ + totalSlotMs?: string; + /** + * [Output-only] [Alpha] Information of the multi-statement transaction if this job is part of one. + */ + transactionInfoTemplate?: ITransactionInfo; + }; + + type IJobStatistics2 = { + /** + * [Output-only] Billing tier for the job. + */ + billingTier?: number; + /** + * [Output-only] Whether the query result was fetched from the query cache. + */ + cacheHit?: boolean; + /** + * [Output-only] [Preview] The number of row access policies affected by a DDL statement. Present only for DROP ALL ROW ACCESS POLICIES queries. + */ + ddlAffectedRowAccessPolicyCount?: string; + /** + * The DDL operation performed, possibly dependent on the pre-existence of the DDL target. Possible values (new values might be added in the future): "CREATE": The query created the DDL target. "SKIP": No-op. Example cases: the query is CREATE TABLE IF NOT EXISTS while the table already exists, or the query is DROP TABLE IF EXISTS while the table does not exist. "REPLACE": The query replaced the DDL target. Example case: the query is CREATE OR REPLACE TABLE, and the table already exists. "DROP": The query deleted the DDL target. + */ + ddlOperationPerformed?: string; + /** + * The DDL target routine. Present only for CREATE/DROP FUNCTION/PROCEDURE queries. + */ + ddlTargetRoutine?: IRoutineReference; + /** + * [Output-only] [Preview] The DDL target row access policy. Present only for CREATE/DROP ROW ACCESS POLICY queries. + */ + ddlTargetRowAccessPolicy?: IRowAccessPolicyReference; + /** + * [Output-only] The DDL target table. Present only for CREATE/DROP TABLE/VIEW and DROP ALL ROW ACCESS POLICIES queries. + */ + ddlTargetTable?: ITableReference; + /** + * [Output-only] The original estimate of bytes processed for the job. + */ + estimatedBytesProcessed?: string; + /** + * [Output-only, Beta] Information about create model query job progress. + */ + modelTraining?: IBigQueryModelTraining; + /** + * [Output-only, Beta] Deprecated; do not use. + */ + modelTrainingCurrentIteration?: number; + /** + * [Output-only, Beta] Deprecated; do not use. + */ + modelTrainingExpectedTotalIteration?: string; + /** + * [Output-only] The number of rows affected by a DML statement. Present only for DML statements INSERT, UPDATE or DELETE. + */ + numDmlAffectedRows?: string; + /** + * [Output-only] Describes execution plan for the query. 
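// Sketch of an IJobConfigurationQuery that writes standard-SQL results to a
// destination table. Assumes the declarations in this file are in scope;
// project, dataset, and table names are placeholders.
const queryConfig: IJobConfigurationQuery = {
  query:
    'SELECT name, SUM(number) AS total FROM `my-project.analytics.names` GROUP BY name',
  useLegacySql: false, // run as standard SQL
  destinationTable: {projectId: 'my-project', datasetId: 'analytics', tableId: 'name_totals'},
  writeDisposition: 'WRITE_TRUNCATE', // replace any existing results
  useQueryCache: true,
  maximumBytesBilled: '1000000000', // fail queries that would bill more than ~1 GB
};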
+ */ + queryPlan?: Array; + /** + * [Output-only] Referenced routines (persistent user-defined functions and stored procedures) for the job. + */ + referencedRoutines?: Array; + /** + * [Output-only] Referenced tables for the job. Queries that reference more than 50 tables will not have a complete list. + */ + referencedTables?: Array; + /** + * [Output-only] Job resource usage breakdown by reservation. + */ + reservationUsage?: Array<{ + /** + * [Output-only] Reservation name or "unreserved" for on-demand resources usage. + */ + name?: string; + /** + * [Output-only] Slot-milliseconds the job spent in the given reservation. + */ + slotMs?: string; + }>; + /** + * [Output-only] The schema of the results. Present only for successful dry run of non-legacy SQL queries. + */ + schema?: ITableSchema; + /** + * The type of query statement, if valid. Possible values (new values might be added in the future): "SELECT": SELECT query. "INSERT": INSERT query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language. "UPDATE": UPDATE query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language. "DELETE": DELETE query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language. "MERGE": MERGE query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language. "ALTER_TABLE": ALTER TABLE query. "ALTER_VIEW": ALTER VIEW query. "ASSERT": ASSERT condition AS 'description'. "CREATE_FUNCTION": CREATE FUNCTION query. "CREATE_MODEL": CREATE [OR REPLACE] MODEL ... AS SELECT ... . "CREATE_PROCEDURE": CREATE PROCEDURE query. "CREATE_TABLE": CREATE [OR REPLACE] TABLE without AS SELECT. "CREATE_TABLE_AS_SELECT": CREATE [OR REPLACE] TABLE ... AS SELECT ... . "CREATE_VIEW": CREATE [OR REPLACE] VIEW ... AS SELECT ... . "DROP_FUNCTION" : DROP FUNCTION query. "DROP_PROCEDURE": DROP PROCEDURE query. "DROP_TABLE": DROP TABLE query. "DROP_VIEW": DROP VIEW query. + */ + statementType?: string; + /** + * [Output-only] [Beta] Describes a timeline of job execution. + */ + timeline?: Array; + /** + * [Output-only] Total bytes billed for the job. + */ + totalBytesBilled?: string; + /** + * [Output-only] Total bytes processed for the job. + */ + totalBytesProcessed?: string; + /** + * [Output-only] For dry-run jobs, totalBytesProcessed is an estimate and this field specifies the accuracy of the estimate. Possible values can be: UNKNOWN: accuracy of the estimate is unknown. PRECISE: estimate is precise. LOWER_BOUND: estimate is lower bound of what the query would cost. UPPER_BOUND: estimate is upper bound of what the query would cost. + */ + totalBytesProcessedAccuracy?: string; + /** + * [Output-only] Total number of partitions processed from all partitioned tables referenced in the job. + */ + totalPartitionsProcessed?: string; + /** + * [Output-only] Slot-milliseconds for the job. + */ + totalSlotMs?: string; + /** + * Standard SQL only: list of undeclared query parameters detected during a dry run validation. + */ + undeclaredQueryParameters?: Array; + }; + + type IJobStatistics3 = { + /** + * [Output-only] The number of bad records encountered. Note that if the job has failed because of more bad records encountered than the maximum allowed in the load job configuration, then this number can be less than the total number of bad records present in the input data. + */ + badRecords?: string; + /** + * [Output-only] Number of bytes of source data in a load job. 
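// Sketch of summarizing a few IJobStatistics2 fields. The int64-valued
// fields are returned as strings, so they are parsed before use; assumes
// the declarations in this file are in scope.
function summarizeQueryStats(stats: IJobStatistics2): string {
  const bytesBilled = Number(stats.totalBytesBilled ?? '0');
  const slotMs = Number(stats.totalSlotMs ?? '0');
  const cache = stats.cacheHit ? ' (cache hit)' : '';
  return `${stats.statementType ?? 'UNKNOWN'}: ${bytesBilled} bytes billed, ${slotMs} slot-ms${cache}`;
}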
+ */ + inputFileBytes?: string; + /** + * [Output-only] Number of source files in a load job. + */ + inputFiles?: string; + /** + * [Output-only] Size of the loaded data in bytes. Note that while a load job is in the running state, this value may change. + */ + outputBytes?: string; + /** + * [Output-only] Number of rows imported in a load job. Note that while an import job is in the running state, this value may change. + */ + outputRows?: string; + }; + + type IJobStatistics4 = { + /** + * [Output-only] Number of files per destination URI or URI pattern specified in the extract configuration. These values will be in the same order as the URIs specified in the 'destinationUris' field. + */ + destinationUriFileCounts?: Array; + /** + * [Output-only] Number of user bytes extracted into the result. This is the byte count as computed by BigQuery for billing purposes. + */ + inputBytes?: string; + }; + + type IJobStatus = { + /** + * [Output-only] Final error result of the job. If present, indicates that the job has completed and was unsuccessful. + */ + errorResult?: IErrorProto; + /** + * [Output-only] The first errors encountered during the running of the job. The final message includes the number of errors that caused the process to stop. Errors here do not necessarily mean that the job has completed or was unsuccessful. + */ + errors?: Array; + /** + * [Output-only] Running state of the job. + */ + state?: string; + }; + + /** + * Represents a single JSON object. + */ + type IJsonObject = {[key: string]: IJsonValue}; + + type IJsonValue = any; + + type IListModelsResponse = { + /** + * Models in the requested dataset. Only the following fields are populated: model_reference, model_type, creation_time, last_modified_time and labels. + */ + models?: Array; + /** + * A token to request the next page of results. + */ + nextPageToken?: string; + }; + + type IListRoutinesResponse = { + /** + * A token to request the next page of results. + */ + nextPageToken?: string; + /** + * Routines in the requested dataset. Unless read_mask is set in the request, only the following fields are populated: etag, project_id, dataset_id, routine_id, routine_type, creation_time, last_modified_time, and language. + */ + routines?: Array; + }; + + /** + * Response message for the ListRowAccessPolicies method. + */ + type IListRowAccessPoliciesResponse = { + /** + * A token to request the next page of results. + */ + nextPageToken?: string; + /** + * Row access policies on the requested table. + */ + rowAccessPolicies?: Array; + }; + + /** + * BigQuery-specific metadata about a location. This will be set on google.cloud.location.Location.metadata in Cloud Location API responses. + */ + type ILocationMetadata = { + /** + * The legacy BigQuery location ID, e.g. “EU” for the “europe” location. This is for any API consumers that need the legacy “US” and “EU” locations. + */ + legacyLocationId?: string; + }; + + type IMaterializedViewDefinition = { + /** + * [Optional] [TrustedTester] Enable automatic refresh of the materialized view when the base table is updated. The default value is "true". + */ + enableRefresh?: boolean; + /** + * [Output-only] [TrustedTester] The time when this materialized view was last modified, in milliseconds since the epoch. + */ + lastRefreshTime?: string; + /** + * [Required] A query whose result is persisted. + */ + query?: string; + /** + * [Optional] [TrustedTester] The maximum frequency at which this materialized view will be refreshed. The default value is "1800000" (30 minutes). 
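// Sketch of interpreting IJobStatus when polling a job, per the field
// descriptions above: a DONE state with a populated errorResult means the
// job completed unsuccessfully. Assumes the declarations in this file are
// in scope.
function jobSucceeded(status: IJobStatus): boolean {
  return status.state === 'DONE' && status.errorResult === undefined;
}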
+ */ + refreshIntervalMs?: string; + }; + + type IModel = { + /** + * Output only. The time when this model was created, in millisecs since the epoch. + */ + creationTime?: string; + /** + * Optional. A user-friendly description of this model. + */ + description?: string; + /** + * Custom encryption configuration (e.g., Cloud KMS keys). This shows the encryption configuration of the model data while stored in BigQuery storage. This field can be used with PatchModel to update encryption key for an already encrypted model. + */ + encryptionConfiguration?: IEncryptionConfiguration; + /** + * Output only. A hash of this resource. + */ + etag?: string; + /** + * Optional. The time when this model expires, in milliseconds since the epoch. If not present, the model will persist indefinitely. Expired models will be deleted and their storage reclaimed. The defaultTableExpirationMs property of the encapsulating dataset can be used to set a default expirationTime on newly created models. + */ + expirationTime?: string; + /** + * Output only. Input feature columns that were used to train this model. + */ + featureColumns?: Array; + /** + * Optional. A descriptive name for this model. + */ + friendlyName?: string; + /** + * Output only. Label columns that were used to train this model. The output of the model will have a "predicted_" prefix to these columns. + */ + labelColumns?: Array; + /** + * The labels associated with this model. You can use these to organize and group your models. Label keys and values can be no longer than 63 characters, can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and each label in the list must have a different key. + */ + labels?: {[key: string]: string}; + /** + * Output only. The time when this model was last modified, in millisecs since the epoch. + */ + lastModifiedTime?: string; + /** + * Output only. The geographic location where the model resides. This value is inherited from the dataset. + */ + location?: string; + /** + * Required. Unique identifier for this model. + */ + modelReference?: IModelReference; + /** + * Output only. Type of the model resource. + */ + modelType?: + | 'MODEL_TYPE_UNSPECIFIED' + | 'LINEAR_REGRESSION' + | 'LOGISTIC_REGRESSION' + | 'KMEANS' + | 'MATRIX_FACTORIZATION' + | 'DNN_CLASSIFIER' + | 'TENSORFLOW' + | 'DNN_REGRESSOR' + | 'BOOSTED_TREE_REGRESSOR' + | 'BOOSTED_TREE_CLASSIFIER' + | 'ARIMA' + | 'AUTOML_REGRESSOR' + | 'AUTOML_CLASSIFIER'; + /** + * Output only. Information for all training runs in increasing order of start_time. + */ + trainingRuns?: Array; + }; + + type IModelDefinition = { + /** + * [Output-only, Beta] Model options used for the first training run. These options are immutable for subsequent training runs. Default values are used for any options not specified in the input query. + */ + modelOptions?: { + labels?: Array; + lossType?: string; + modelType?: string; + }; + /** + * [Output-only, Beta] Information about ml training runs, each training run comprises of multiple iterations and there may be multiple training runs for the model if warm start is used or if a user decides to continue a previously cancelled query. + */ + trainingRuns?: Array; + }; + + type IModelReference = { + /** + * [Required] The ID of the dataset containing this model. + */ + datasetId?: string; + /** + * [Required] The ID of the model. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). 
The maximum length is 1,024 characters. + */ + modelId?: string; + /** + * [Required] The ID of the project containing this model. + */ + projectId?: string; + }; + + /** + * Evaluation metrics for multi-class classification/classifier models. + */ + type IMultiClassClassificationMetrics = { + /** + * Aggregate classification metrics. + */ + aggregateClassificationMetrics?: IAggregateClassificationMetrics; + /** + * Confusion matrix at different thresholds. + */ + confusionMatrixList?: Array; + }; + + /** + * An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members` to a single `role`. Members can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). **JSON example:** { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 } **YAML example:** bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, see the [IAM documentation](https://cloud.google.com/iam/docs/). + */ + type IPolicy = { + /** + * Specifies cloud audit logging configuration for this policy. + */ + auditConfigs?: Array; + /** + * Associates a list of `members` to a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one member. + */ + bindings?: Array; + /** + * `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. 
If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. + */ + etag?: string; + /** + * Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + */ + version?: number; + }; + + type IProjectList = { + /** + * A hash of the page of results + */ + etag?: string; + /** + * The type of list. + */ + kind?: string; + /** + * A token to request the next page of results. + */ + nextPageToken?: string; + /** + * Projects to which you have at least READ access. + */ + projects?: Array<{ + /** + * A descriptive name for this project. + */ + friendlyName?: string; + /** + * An opaque ID of this project. + */ + id?: string; + /** + * The resource type. + */ + kind?: string; + /** + * The numeric ID of this project. + */ + numericId?: string; + /** + * A unique reference to this project. + */ + projectReference?: IProjectReference; + }>; + /** + * The total number of projects in the list. + */ + totalItems?: number; + }; + + type IProjectReference = { + /** + * [Required] ID of the project. Can be either the numeric ID or the assigned ID of the project. + */ + projectId?: string; + }; + + type IQueryParameter = { + /** + * [Optional] If unset, this is a positional parameter. Otherwise, should be unique within a query. + */ + name?: string; + /** + * [Required] The type of this parameter. + */ + parameterType?: IQueryParameterType; + /** + * [Required] The value of this parameter. + */ + parameterValue?: IQueryParameterValue; + }; + + type IQueryParameterType = { + /** + * [Optional] The type of the array's elements, if this is an array. + */ + arrayType?: IQueryParameterType; + /** + * [Optional] The types of the fields of this struct, in order, if this is a struct. + */ + structTypes?: Array<{ + /** + * [Optional] Human-oriented description of the field. + */ + description?: string; + /** + * [Optional] The name of this field. + */ + name?: string; + /** + * [Required] The type of this field. + */ + type?: IQueryParameterType; + }>; + /** + * [Required] The top level type of this field. + */ + type?: string; + }; + + type IQueryParameterValue = { + /** + * [Optional] The array values, if this is an array type. + */ + arrayValues?: Array; + /** + * [Optional] The struct field values, in order of the struct type's declaration. + */ + structValues?: {[key: string]: IQueryParameterValue}; + /** + * [Optional] The value of this value, if a simple scalar type. 
+ */ + value?: string; + }; + + type IQueryRequest = { + /** + * Connection properties. + */ + connectionProperties?: Array; + /** + * [Optional] Specifies the default datasetId and projectId to assume for any unqualified table names in the query. If not set, all table names in the query string must be qualified in the format 'datasetId.tableId'. + */ + defaultDataset?: IDatasetReference; + /** + * [Optional] If set to true, BigQuery doesn't run the job. Instead, if the query is valid, BigQuery returns statistics about the job such as how many bytes would be processed. If the query is invalid, an error returns. The default value is false. + */ + dryRun?: boolean; + /** + * The resource type of the request. + */ + kind?: string; + /** + * The labels associated with this job. You can use these to organize and group your jobs. Label keys and values can be no longer than 63 characters, can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and each label in the list must have a different key. + */ + labels?: {[key: string]: string}; + /** + * The geographic location where the job should run. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location. + */ + location?: string; + /** + * [Optional] The maximum number of rows of data to return per page of results. Setting this flag to a small value such as 1000 and then paging through results might improve reliability when the query result set is large. In addition to this limit, responses are also limited to 10 MB. By default, there is no maximum row count, and only the byte limit applies. + */ + maxResults?: number; + /** + * [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge). If unspecified, this will be set to your project default. + */ + maximumBytesBilled?: string; + /** + * Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query. + */ + parameterMode?: string; + /** + * [Deprecated] This property is deprecated. + */ + preserveNulls?: boolean; + /** + * [Required] A query string, following the BigQuery query syntax, of the query to execute. Example: "SELECT count(f1) FROM [myProjectId:myDatasetId.myTableId]". + */ + query?: string; + /** + * Query parameters for Standard SQL queries. + */ + queryParameters?: Array; + /** + * A unique user provided identifier to ensure idempotent behavior for queries. Note that this is different from the job_id. It has the following properties: 1. It is case-sensitive, limited to up to 36 ASCII characters. A UUID is recommended. 2. Read only queries can ignore this token since they are nullipotent by definition. 3. For the purposes of idempotency ensured by the request_id, a request is considered duplicate of another only if they have the same request_id and are actually duplicates. When determining whether a request is a duplicate of the previous request, all parameters in the request that may affect the behavior are considered. For example, query, connection_properties, query_parameters, use_legacy_sql are parameters that affect the result and are considered when determining whether a request is a duplicate, but properties like timeout_ms don't affect the result and are thus not considered. Dry run query requests are never considered duplicate of another request. 4. 
When a duplicate mutating query request is detected, it returns: a. the results of the mutation if it completes successfully within the timeout. b. the running operation if it is still in progress at the end of the timeout. 5. Its lifetime is limited to 15 minutes. In other words, if two requests are sent with the same request_id, but more than 15 minutes apart, idempotency is not guaranteed. + */ + requestId?: string; + /** + * [Optional] How long to wait for the query to complete, in milliseconds, before the request times out and returns. Note that this is only a timeout for the request, not the query. If the query takes longer to run than the timeout value, the call returns without any results and with the 'jobComplete' flag set to false. You can call GetQueryResults() to wait for the query to complete and read the results. The default value is 10000 milliseconds (10 seconds). + */ + timeoutMs?: number; + /** + * Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true. If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value of flattenResults is ignored; query will be run as if flattenResults is false. + */ + useLegacySql?: boolean; + /** + * [Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. The default value is true. + */ + useQueryCache?: boolean; + }; + + type IQueryResponse = { + /** + * Whether the query result was fetched from the query cache. + */ + cacheHit?: boolean; + /** + * [Output-only] The first errors or warnings encountered during the running of the job. The final message includes the number of errors that caused the process to stop. Errors here do not necessarily mean that the job has completed or was unsuccessful. + */ + errors?: Array; + /** + * Whether the query has completed or not. If rows or totalRows are present, this will always be true. If this is false, totalRows will not be available. + */ + jobComplete?: boolean; + /** + * Reference to the Job that was created to run the query. This field will be present even if the original request timed out, in which case GetQueryResults can be used to read the results once the query has completed. Since this API only returns the first page of results, subsequent pages can be fetched via the same mechanism (GetQueryResults). + */ + jobReference?: IJobReference; + /** + * The resource type. + */ + kind?: string; + /** + * [Output-only] The number of rows affected by a DML statement. Present only for DML statements INSERT, UPDATE or DELETE. + */ + numDmlAffectedRows?: string; + /** + * A token used for paging results. + */ + pageToken?: string; + /** + * An object with as many results as can be contained within the maximum permitted reply size. To get any additional rows, you can call GetQueryResults and specify the jobReference returned above. + */ + rows?: Array; + /** + * The schema of the results. Present only when the query completes successfully. + */ + schema?: ITableSchema; + /** + * The total number of bytes processed for this query. If this query was a dry run, this is the number of bytes that would be processed if the query were run. + */ + totalBytesProcessed?: string; + /** + * The total number of rows in the complete query result set, which can be more than the number of rows in this single page of results. 
+ */ + totalRows?: string; + }; + + type IQueryTimelineSample = { + /** + * Total number of units currently being processed by workers. This does not correspond directly to slot usage. This is the largest value observed since the last sample. + */ + activeUnits?: string; + /** + * Total parallel units of work completed by this query. + */ + completedUnits?: string; + /** + * Milliseconds elapsed since the start of query execution. + */ + elapsedMs?: string; + /** + * Total parallel units of work remaining for the active stages. + */ + pendingUnits?: string; + /** + * Cumulative slot-ms consumed by the query. + */ + totalSlotMs?: string; + }; + + type IRangePartitioning = { + /** + * [TrustedTester] [Required] The table is partitioned by this field. The field must be a top-level NULLABLE/REQUIRED field. The only supported type is INTEGER/INT64. + */ + field?: string; + /** + * [TrustedTester] [Required] Defines the ranges for range partitioning. + */ + range?: { + /** + * [TrustedTester] [Required] The end of range partitioning, exclusive. + */ + end?: string; + /** + * [TrustedTester] [Required] The width of each interval. + */ + interval?: string; + /** + * [TrustedTester] [Required] The start of range partitioning, inclusive. + */ + start?: string; + }; + }; + + /** + * Evaluation metrics used by weighted-ALS models specified by feedback_type=implicit. + */ + type IRankingMetrics = { + /** + * Determines the goodness of a ranking by computing the percentile rank from the predicted confidence and dividing it by the original rank. + */ + averageRank?: number; + /** + * Calculates a precision per user for all the items by ranking them and then averages all the precisions across all the users. + */ + meanAveragePrecision?: number; + /** + * Similar to the mean squared error computed in regression and explicit recommendation models except instead of computing the rating directly, the output from evaluate is computed against a preference which is 1 or 0 depending on if the rating exists or not. + */ + meanSquaredError?: number; + /** + * A metric to determine the goodness of a ranking calculated from the predicted confidence by comparing it to an ideal rank measured by the original ratings. + */ + normalizedDiscountedCumulativeGain?: number; + }; + + /** + * Evaluation metrics for regression and explicit feedback type matrix factorization models. + */ + type IRegressionMetrics = { + /** + * Mean absolute error. + */ + meanAbsoluteError?: number; + /** + * Mean squared error. + */ + meanSquaredError?: number; + /** + * Mean squared log error. + */ + meanSquaredLogError?: number; + /** + * Median absolute error. + */ + medianAbsoluteError?: number; + /** + * R^2 score. + */ + rSquared?: number; + }; + + /** + * A user-defined function or a stored procedure. + */ + type IRoutine = { + /** + * Optional. + */ + arguments?: Array; + /** + * Output only. The time when this routine was created, in milliseconds since the epoch. + */ + creationTime?: string; + /** + * Required. The body of the routine. For functions, this is the expression in the AS clause. If language=SQL, it is the substring inside (but excluding) the parentheses. For example, for the function created with the following statement: `CREATE FUNCTION JoinLines(x string, y string) as (concat(x, "\n", y))` The definition_body is `concat(x, "\n", y)` (\n is not replaced with linebreak). If language=JAVASCRIPT, it is the evaluated string in the AS clause. 
For example, for the function created with the following statement: `CREATE FUNCTION f() RETURNS STRING LANGUAGE js AS 'return "\n";\n'` The definition_body is `return "\n";\n` Note that both \n are replaced with linebreaks. + */ + definitionBody?: string; + /** + * Optional. [Experimental] The description of the routine if defined. + */ + description?: string; + /** + * Optional. [Experimental] The determinism level of the JavaScript UDF if defined. + */ + determinismLevel?: + | 'DETERMINISM_LEVEL_UNSPECIFIED' + | 'DETERMINISTIC' + | 'NOT_DETERMINISTIC'; + /** + * Output only. A hash of this resource. + */ + etag?: string; + /** + * Optional. If language = "JAVASCRIPT", this field stores the path of the imported JAVASCRIPT libraries. + */ + importedLibraries?: Array; + /** + * Optional. Defaults to "SQL". + */ + language?: 'LANGUAGE_UNSPECIFIED' | 'SQL' | 'JAVASCRIPT'; + /** + * Output only. The time when this routine was last modified, in milliseconds since the epoch. + */ + lastModifiedTime?: string; + /** + * Optional if language = "SQL"; required otherwise. If absent, the return type is inferred from definition_body at query time in each query that references this routine. If present, then the evaluated result will be cast to the specified returned type at query time. For example, for the functions created with the following statements: * `CREATE FUNCTION Add(x FLOAT64, y FLOAT64) RETURNS FLOAT64 AS (x + y);` * `CREATE FUNCTION Increment(x FLOAT64) AS (Add(x, 1));` * `CREATE FUNCTION Decrement(x FLOAT64) RETURNS FLOAT64 AS (Add(x, -1));` The return_type is `{type_kind: "FLOAT64"}` for `Add` and `Decrement`, and is absent for `Increment` (inferred as FLOAT64 at query time). Suppose the function `Add` is replaced by `CREATE OR REPLACE FUNCTION Add(x INT64, y INT64) AS (x + y);` Then the inferred return type of `Increment` is automatically changed to INT64 at query time, while the return type of `Decrement` remains FLOAT64. + */ + returnType?: IStandardSqlDataType; + /** + * Required. Reference describing the ID of this routine. + */ + routineReference?: IRoutineReference; + /** + * Required. The type of routine. + */ + routineType?: 'ROUTINE_TYPE_UNSPECIFIED' | 'SCALAR_FUNCTION' | 'PROCEDURE'; + }; + + type IRoutineReference = { + /** + * [Required] The ID of the dataset containing this routine. + */ + datasetId?: string; + /** + * [Required] The ID of the project containing this routine. + */ + projectId?: string; + /** + * [Required] The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. + */ + routineId?: string; + }; + + /** + * A single row in the confusion matrix. + */ + type IRow = { + /** + * The original label of this row. + */ + actualLabel?: string; + /** + * Info describing predicted label distribution. + */ + entries?: Array; + }; + + /** + * Represents access on a subset of rows on the specified table, defined by its filter predicate. Access to the subset of rows is controlled by its IAM policy. + */ + type IRowAccessPolicy = { + /** + * Output only. The time when this row access policy was created, in milliseconds since the epoch. + */ + creationTime?: string; + /** + * Output only. A hash of this resource. + */ + etag?: string; + /** + * Required. A SQL boolean expression that represents the rows defined by this row access policy, similar to the boolean expression in a WHERE clause of a SELECT query on a table. 
References to other tables, routines, and temporary functions are not supported. Examples: region="EU" date_field = CAST('2019-9-27' as DATE) nullable_field is not NULL numeric_field BETWEEN 1.0 AND 5.0 + */ + filterPredicate?: string; + /** + * Output only. The time when this row access policy was last modified, in milliseconds since the epoch. + */ + lastModifiedTime?: string; + /** + * Required. Reference describing the ID of this row access policy. + */ + rowAccessPolicyReference?: IRowAccessPolicyReference; + }; + + type IRowAccessPolicyReference = { + /** + * [Required] The ID of the dataset containing this row access policy. + */ + datasetId?: string; + /** + * [Required] The ID of the row access policy. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. + */ + policyId?: string; + /** + * [Required] The ID of the project containing this row access policy. + */ + projectId?: string; + /** + * [Required] The ID of the table containing this row access policy. + */ + tableId?: string; + }; + + type IRowLevelSecurityStatistics = { + /** + * [Output-only] [Preview] Whether any accessed data was protected by row access policies. + */ + rowLevelSecurityApplied?: boolean; + }; + + type IScriptStackFrame = { + /** + * [Output-only] One-based end column. + */ + endColumn?: number; + /** + * [Output-only] One-based end line. + */ + endLine?: number; + /** + * [Output-only] Name of the active procedure, empty if in a top-level script. + */ + procedureId?: string; + /** + * [Output-only] One-based start column. + */ + startColumn?: number; + /** + * [Output-only] One-based start line. + */ + startLine?: number; + /** + * [Output-only] Text of the current statement/expression. + */ + text?: string; + }; + + type IScriptStatistics = { + /** + * [Output-only] Whether this child job was a statement or expression. + */ + evaluationKind?: string; + /** + * Stack trace showing the line/column/procedure name of each frame on the stack at the point where the current evaluation happened. The leaf frame is first, the primary script is last. Never empty. + */ + stackFrames?: Array; + }; + + /** + * Request message for `SetIamPolicy` method. + */ + type ISetIamPolicyRequest = { + /** + * REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) might reject them. + */ + policy?: IPolicy; + /** + * OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the mask will be modified. If no mask is provided, the following default mask is used: `paths: "bindings, etag"` + */ + updateMask?: string; + }; + + type ISnapshotDefinition = { + /** + * [Required] Reference describing the ID of the table that is snapshotted. + */ + baseTableReference?: ITableReference; + /** + * [Required] The time at which the base table was snapshot. + */ + snapshotTime?: string; + }; + + /** + * The type of a variable, e.g., a function argument. Examples: INT64: {type_kind="INT64"} ARRAY: {type_kind="ARRAY", array_element_type="STRING"} STRUCT>: {type_kind="STRUCT", struct_type={fields=[ {name="x", type={type_kind="STRING"}}, {name="y", type={type_kind="ARRAY", array_element_type="DATE"}} ]}} + */ + type IStandardSqlDataType = { + /** + * The type of the array's elements, if type_kind = "ARRAY". 
+ */ + arrayElementType?: IStandardSqlDataType; + /** + * The fields of this struct, in order, if type_kind = "STRUCT". + */ + structType?: IStandardSqlStructType; + /** + * Required. The top level type of this field. Can be any standard SQL data type (e.g., "INT64", "DATE", "ARRAY"). + */ + typeKind?: + | 'TYPE_KIND_UNSPECIFIED' + | 'INT64' + | 'BOOL' + | 'FLOAT64' + | 'STRING' + | 'BYTES' + | 'TIMESTAMP' + | 'DATE' + | 'TIME' + | 'DATETIME' + | 'GEOGRAPHY' + | 'NUMERIC' + | 'BIGNUMERIC' + | 'ARRAY' + | 'STRUCT'; + }; + + /** + * A field or a column. + */ + type IStandardSqlField = { + /** + * Optional. The name of this field. Can be absent for struct fields. + */ + name?: string; + /** + * Optional. The type of this parameter. Absent if not explicitly specified (e.g., CREATE FUNCTION statement can omit the return type; in this case the output parameter does not have this "type" field). + */ + type?: IStandardSqlDataType; + }; + + type IStandardSqlStructType = {fields?: Array}; + + type IStreamingbuffer = { + /** + * [Output-only] A lower-bound estimate of the number of bytes currently in the streaming buffer. + */ + estimatedBytes?: string; + /** + * [Output-only] A lower-bound estimate of the number of rows currently in the streaming buffer. + */ + estimatedRows?: string; + /** + * [Output-only] Contains the timestamp of the oldest entry in the streaming buffer, in milliseconds since the epoch, if the streaming buffer is available. + */ + oldestEntryTime?: string; + }; + + type ITable = { + /** + * [Beta] Clustering specification for the table. Must be specified with partitioning, data in the table will be first partitioned and subsequently clustered. + */ + clustering?: IClustering; + /** + * [Output-only] The time when this table was created, in milliseconds since the epoch. + */ + creationTime?: string; + /** + * [Optional] A user-friendly description of this table. + */ + description?: string; + /** + * Custom encryption configuration (e.g., Cloud KMS keys). + */ + encryptionConfiguration?: IEncryptionConfiguration; + /** + * [Output-only] A hash of the table metadata. Used to ensure there were no concurrent modifications to the resource when attempting an update. Not guaranteed to change when the table contents or the fields numRows, numBytes, numLongTermBytes or lastModifiedTime change. + */ + etag?: string; + /** + * [Optional] The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. The defaultTableExpirationMs property of the encapsulating dataset can be used to set a default expirationTime on newly created tables. + */ + expirationTime?: string; + /** + * [Optional] Describes the data format, location, and other properties of a table stored outside of BigQuery. By defining these properties, the data source can then be queried as if it were a standard BigQuery table. + */ + externalDataConfiguration?: IExternalDataConfiguration; + /** + * [Optional] A descriptive name for this table. + */ + friendlyName?: string; + /** + * [Output-only] An opaque ID uniquely identifying the table. + */ + id?: string; + /** + * [Output-only] The type of the resource. + */ + kind?: string; + /** + * The labels associated with this table. You can use these to organize and group your tables. Label keys and values can be no longer than 63 characters, can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. 
Label values are optional. Label keys must start with a letter and each label in the list must have a different key. + */ + labels?: {[key: string]: string}; + /** + * [Output-only] The time when this table was last modified, in milliseconds since the epoch. + */ + lastModifiedTime?: string; + /** + * [Output-only] The geographic location where the table resides. This value is inherited from the dataset. + */ + location?: string; + /** + * [Optional] Materialized view definition. + */ + materializedView?: IMaterializedViewDefinition; + /** + * [Output-only, Beta] Present iff this table represents a ML model. Describes the training information for the model, and it is required to run 'PREDICT' queries. + */ + model?: IModelDefinition; + /** + * [Output-only] The size of this table in bytes, excluding any data in the streaming buffer. + */ + numBytes?: string; + /** + * [Output-only] The number of bytes in the table that are considered "long-term storage". + */ + numLongTermBytes?: string; + /** + * [Output-only] [TrustedTester] The physical size of this table in bytes, excluding any data in the streaming buffer. This includes compression and storage used for time travel. + */ + numPhysicalBytes?: string; + /** + * [Output-only] The number of rows of data in this table, excluding any data in the streaming buffer. + */ + numRows?: string; + /** + * [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning and rangePartitioning should be specified. + */ + rangePartitioning?: IRangePartitioning; + /** + * [Optional] If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified. + */ + requirePartitionFilter?: boolean; + /** + * [Optional] Describes the schema of this table. + */ + schema?: ITableSchema; + /** + * [Output-only] A URL that can be used to access this resource again. + */ + selfLink?: string; + /** + * [Output-only] Snapshot definition. + */ + snapshotDefinition?: ISnapshotDefinition; + /** + * [Output-only] Contains information regarding this table's streaming buffer, if one is present. This field will be absent if the table is not being streamed to or if there is no data in the streaming buffer. + */ + streamingBuffer?: IStreamingbuffer; + /** + * [Required] Reference describing the ID of this table. + */ + tableReference?: ITableReference; + /** + * Time-based partitioning specification for this table. Only one of timePartitioning and rangePartitioning should be specified. + */ + timePartitioning?: ITimePartitioning; + /** + * [Output-only] Describes the table type. The following values are supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined by a SQL query. SNAPSHOT: An immutable, read-only table that is a copy of another table. [TrustedTester] MATERIALIZED_VIEW: SQL query whose result is persisted. EXTERNAL: A table that references data stored in an external storage system, such as Google Cloud Storage. The default value is TABLE. + */ + type?: string; + /** + * [Optional] The view definition. + */ + view?: IViewDefinition; + }; + + type ITableCell = {v?: any}; + + type ITableDataInsertAllRequest = { + /** + * [Optional] Accept rows that contain values that do not match the schema. The unknown values are ignored. Default is false, which treats unknown values as errors. + */ + ignoreUnknownValues?: boolean; + /** + * The resource type of the response. + */ + kind?: string; + /** + * The rows to insert. 
+ */ + rows?: Array<{ + /** + * [Optional] A unique ID for each row. BigQuery uses this property to detect duplicate insertion requests on a best-effort basis. + */ + insertId?: string; + /** + * [Required] A JSON object that contains a row of data. The object's properties and values must match the destination table's schema. + */ + json?: IJsonObject; + }>; + /** + * [Optional] Insert all valid rows of a request, even if invalid rows exist. The default value is false, which causes the entire request to fail if any invalid rows exist. + */ + skipInvalidRows?: boolean; + /** + * If specified, treats the destination table as a base template, and inserts the rows into an instance table named "{destination}{templateSuffix}". BigQuery will manage creation of the instance table, using the schema of the base template table. See https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables for considerations when working with templates tables. + */ + templateSuffix?: string; + }; + + type ITableDataInsertAllResponse = { + /** + * An array of errors for rows that were not inserted. + */ + insertErrors?: Array<{ + /** + * Error information for the row indicated by the index property. + */ + errors?: Array; + /** + * The index of the row that error applies to. + */ + index?: number; + }>; + /** + * The resource type of the response. + */ + kind?: string; + }; + + type ITableDataList = { + /** + * A hash of this page of results. + */ + etag?: string; + /** + * The resource type of the response. + */ + kind?: string; + /** + * A token used for paging results. Providing this token instead of the startIndex parameter can help you retrieve stable results when an underlying table is changing. + */ + pageToken?: string; + /** + * Rows of results. + */ + rows?: Array; + /** + * The total number of rows in the complete table. + */ + totalRows?: string; + }; + + type ITableFieldSchema = { + /** + * [Optional] The categories attached to this field, used for field-level access control. + */ + categories?: { + /** + * A list of category resource names. For example, "projects/1/taxonomies/2/categories/3". At most 5 categories are allowed. + */ + names?: Array; + }; + /** + * [Optional] The field description. The maximum length is 1,024 characters. + */ + description?: string; + /** + * [Optional] Describes the nested schema fields if the type property is set to RECORD. + */ + fields?: Array; + /** + * [Optional] The field mode. Possible values include NULLABLE, REQUIRED and REPEATED. The default value is NULLABLE. + */ + mode?: string; + /** + * [Required] The field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_), and must start with a letter or underscore. The maximum length is 128 characters. + */ + name?: string; + policyTags?: { + /** + * A list of category resource names. For example, "projects/1/location/eu/taxonomies/2/policyTags/3". At most 1 policy tag is allowed. + */ + names?: Array; + }; + /** + * [Required] The field data type. Possible values include STRING, BYTES, INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as FLOAT), BOOLEAN, BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME, DATETIME, RECORD (where RECORD indicates that the field contains a nested schema) or STRUCT (same as RECORD). + */ + type?: string; + }; + + type ITableList = { + /** + * A hash of this page of results. + */ + etag?: string; + /** + * The type of list. + */ + kind?: string; + /** + * A token to request the next page of results. 
+ */ + nextPageToken?: string; + /** + * Tables in the requested dataset. + */ + tables?: Array<{ + /** + * [Beta] Clustering specification for this table, if configured. + */ + clustering?: IClustering; + /** + * The time when this table was created, in milliseconds since the epoch. + */ + creationTime?: string; + /** + * [Optional] The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. + */ + expirationTime?: string; + /** + * The user-friendly name for this table. + */ + friendlyName?: string; + /** + * An opaque ID of the table + */ + id?: string; + /** + * The resource type. + */ + kind?: string; + /** + * The labels associated with this table. You can use these to organize and group your tables. + */ + labels?: {[key: string]: string}; + /** + * The range partitioning specification for this table, if configured. + */ + rangePartitioning?: IRangePartitioning; + /** + * A reference uniquely identifying the table. + */ + tableReference?: ITableReference; + /** + * The time-based partitioning specification for this table, if configured. + */ + timePartitioning?: ITimePartitioning; + /** + * The type of table. Possible values are: TABLE, VIEW. + */ + type?: string; + /** + * Additional details for a view. + */ + view?: { + /** + * True if view is defined in legacy SQL dialect, false if in standard SQL. + */ + useLegacySql?: boolean; + }; + }>; + /** + * The total number of tables in the dataset. + */ + totalItems?: number; + }; + + type ITableReference = { + /** + * [Required] The ID of the dataset containing this table. + */ + datasetId?: string; + /** + * [Required] The ID of the project containing this table. + */ + projectId?: string; + /** + * [Required] The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. + */ + tableId?: string; + }; + + type ITableRow = { + /** + * Represents a single row in the result set, consisting of one or more fields. + */ + f?: Array; + }; + + type ITableSchema = { + /** + * Describes the fields in a table. + */ + fields?: Array; + }; + + /** + * Request message for `TestIamPermissions` method. + */ + type ITestIamPermissionsRequest = { + /** + * The set of permissions to check for the `resource`. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more information see [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions). + */ + permissions?: Array; + }; + + /** + * Response message for `TestIamPermissions` method. + */ + type ITestIamPermissionsResponse = { + /** + * A subset of `TestPermissionsRequest.permissions` that the caller is allowed. + */ + permissions?: Array; + }; + + type ITimePartitioning = { + /** + * [Optional] Number of milliseconds for which to keep the storage for partitions in the table. The storage in a partition will have an expiration time of its partition time plus this value. + */ + expirationMs?: string; + /** + * [Beta] [Optional] If not set, the table is partitioned by pseudo column, referenced via either '_PARTITIONTIME' as TIMESTAMP type, or '_PARTITIONDATE' as DATE type. If field is specified, the table is instead partitioned by this field. The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED. 
+ */ + field?: string; + requirePartitionFilter?: boolean; + /** + * [Required] The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively. When the type is not specified, the default behavior is DAY. + */ + type?: string; + }; + + type ITrainingOptions = { + /** + * Whether to enable auto ARIMA or not. + */ + autoArima?: boolean; + /** + * The max value of non-seasonal p and q. + */ + autoArimaMaxOrder?: string; + /** + * Batch size for dnn models. + */ + batchSize?: string; + /** + * The data frequency of a time series. + */ + dataFrequency?: + | 'DATA_FREQUENCY_UNSPECIFIED' + | 'AUTO_FREQUENCY' + | 'YEARLY' + | 'QUARTERLY' + | 'MONTHLY' + | 'WEEKLY' + | 'DAILY' + | 'HOURLY'; + /** + * The column to split data with. This column won't be used as a feature. 1. When data_split_method is CUSTOM, the corresponding column should be boolean. The rows with true value tag are eval data, and the false are training data. 2. When data_split_method is SEQ, the first DATA_SPLIT_EVAL_FRACTION rows (from smallest to largest) in the corresponding column are used as training data, and the rest are eval data. It respects the order in Orderable data types: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type-properties + */ + dataSplitColumn?: string; + /** + * The fraction of evaluation data over the whole input data. The rest of data will be used as training data. The format should be double. Accurate to two decimal places. Default value is 0.2. + */ + dataSplitEvalFraction?: number; + /** + * The data split type for training and evaluation, e.g. RANDOM. + */ + dataSplitMethod?: + | 'DATA_SPLIT_METHOD_UNSPECIFIED' + | 'RANDOM' + | 'CUSTOM' + | 'SEQUENTIAL' + | 'NO_SPLIT' + | 'AUTO_SPLIT'; + /** + * Distance type for clustering models. + */ + distanceType?: 'DISTANCE_TYPE_UNSPECIFIED' | 'EUCLIDEAN' | 'COSINE'; + /** + * Dropout probability for dnn models. + */ + dropout?: number; + /** + * Whether to stop early when the loss doesn't improve significantly any more (compared to min_relative_progress). Used only for iterative training algorithms. + */ + earlyStop?: boolean; + /** + * Feedback type that specifies which algorithm to run for matrix factorization. + */ + feedbackType?: 'FEEDBACK_TYPE_UNSPECIFIED' | 'IMPLICIT' | 'EXPLICIT'; + /** + * Hidden units for dnn models. + */ + hiddenUnits?: Array; + /** + * The geographical region based on which the holidays are considered in time series modeling. If a valid value is specified, then holiday effects modeling is enabled. + */ + holidayRegion?: + | 'HOLIDAY_REGION_UNSPECIFIED' + | 'GLOBAL' + | 'NA' + | 'JAPAC' + | 'EMEA' + | 'LAC' + | 'AE' + | 'AR' + | 'AT' + | 'AU' + | 'BE' + | 'BR' + | 'CA' + | 'CH' + | 'CL' + | 'CN' + | 'CO' + | 'CS' + | 'CZ' + | 'DE' + | 'DK' + | 'DZ' + | 'EC' + | 'EE' + | 'EG' + | 'ES' + | 'FI' + | 'FR' + | 'GB' + | 'GR' + | 'HK' + | 'HU' + | 'ID' + | 'IE' + | 'IL' + | 'IN' + | 'IR' + | 'IT' + | 'JP' + | 'KR' + | 'LV' + | 'MA' + | 'MX' + | 'MY' + | 'NG' + | 'NL' + | 'NO' + | 'NZ' + | 'PE' + | 'PH' + | 'PK' + | 'PL' + | 'PT' + | 'RO' + | 'RS' + | 'RU' + | 'SA' + | 'SE' + | 'SG' + | 'SI' + | 'SK' + | 'TH' + | 'TR' + | 'TW' + | 'UA' + | 'US' + | 'VE' + | 'VN' + | 'ZA'; + /** + * The number of periods ahead that need to be forecasted. + */ + horizon?: string; + /** + * Include drift when fitting an ARIMA model. + */ + includeDrift?: boolean; + /** + * Specifies the initial learning rate for the line search learn rate strategy. 
+ */ + initialLearnRate?: number; + /** + * Name of input label columns in training data. + */ + inputLabelColumns?: Array; + /** + * Item column specified for matrix factorization models. + */ + itemColumn?: string; + /** + * The column used to provide the initial centroids for kmeans algorithm when kmeans_initialization_method is CUSTOM. + */ + kmeansInitializationColumn?: string; + /** + * The method used to initialize the centroids for kmeans algorithm. + */ + kmeansInitializationMethod?: + | 'KMEANS_INITIALIZATION_METHOD_UNSPECIFIED' + | 'RANDOM' + | 'CUSTOM' + | 'KMEANS_PLUS_PLUS'; + /** + * L1 regularization coefficient. + */ + l1Regularization?: number; + /** + * L2 regularization coefficient. + */ + l2Regularization?: number; + /** + * Weights associated with each label class, for rebalancing the training data. Only applicable for classification models. + */ + labelClassWeights?: {[key: string]: number}; + /** + * Learning rate in training. Used only for iterative training algorithms. + */ + learnRate?: number; + /** + * The strategy to determine learn rate for the current iteration. + */ + learnRateStrategy?: + | 'LEARN_RATE_STRATEGY_UNSPECIFIED' + | 'LINE_SEARCH' + | 'CONSTANT'; + /** + * Type of loss function used during training run. + */ + lossType?: 'LOSS_TYPE_UNSPECIFIED' | 'MEAN_SQUARED_LOSS' | 'MEAN_LOG_LOSS'; + /** + * The maximum number of iterations in training. Used only for iterative training algorithms. + */ + maxIterations?: string; + /** + * Maximum depth of a tree for boosted tree models. + */ + maxTreeDepth?: string; + /** + * When early_stop is true, stops training when accuracy improvement is less than 'min_relative_progress'. Used only for iterative training algorithms. + */ + minRelativeProgress?: number; + /** + * Minimum split loss for boosted tree models. + */ + minSplitLoss?: number; + /** + * [Beta] Google Cloud Storage URI from which the model was imported. Only applicable for imported models. + */ + modelUri?: string; + /** + * A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are the AR order, the degree of differencing, and the MA order. + */ + nonSeasonalOrder?: IArimaOrder; + /** + * Number of clusters for clustering models. + */ + numClusters?: string; + /** + * Num factors specified for matrix factorization models. + */ + numFactors?: string; + /** + * Optimization strategy for training linear regression models. + */ + optimizationStrategy?: + | 'OPTIMIZATION_STRATEGY_UNSPECIFIED' + | 'BATCH_GRADIENT_DESCENT' + | 'NORMAL_EQUATION'; + /** + * Whether to preserve the input structs in output feature names. Suppose there is a struct A with field b. When false (default), the output feature name is A_b. When true, the output feature name is A.b. + */ + preserveInputStructs?: boolean; + /** + * Subsample fraction of the training data to grow tree to prevent overfitting for boosted tree models. + */ + subsample?: number; + /** + * Column to be designated as time series data for ARIMA model. + */ + timeSeriesDataColumn?: string; + /** + * The id column that will be used to indicate different time series to forecast in parallel. + */ + timeSeriesIdColumn?: string; + /** + * Column to be designated as time series timestamp for ARIMA model. + */ + timeSeriesTimestampColumn?: string; + /** + * User column specified for matrix factorization models. + */ + userColumn?: string; + /** + * Hyperparameter for matrix factoration when implicit feedback type is specified. 
+ */ + walsAlpha?: number; + /** + * Whether to train a model from the last checkpoint. + */ + warmStart?: boolean; + }; + + /** + * Information about a single training query run for the model. + */ + type ITrainingRun = { + /** + * Data split result of the training run. Only set when the input data is actually split. + */ + dataSplitResult?: IDataSplitResult; + /** + * The evaluation metrics over training/eval data that were computed at the end of training. + */ + evaluationMetrics?: IEvaluationMetrics; + /** + * Global explanations for important features of the model. For multi-class models, there is one entry for each label class. For other models, there is only one entry in the list. + */ + globalExplanations?: Array; + /** + * Output of each iteration run, results.size() <= max_iterations. + */ + results?: Array; + /** + * The start time of this training run. + */ + startTime?: string; + /** + * Options that were used for this training run, includes user specified and default options that were used. + */ + trainingOptions?: ITrainingOptions; + }; + + type ITransactionInfo = { + /** + * [Output-only] // [Alpha] Id of the transaction. + */ + transactionId?: string; + }; + + /** + * This is used for defining User Defined Function (UDF) resources only when using legacy SQL. Users of Standard SQL should leverage either DDL (e.g. CREATE [TEMPORARY] FUNCTION ... ) or the Routines API to define UDF resources. For additional information on migrating, see: https://cloud.google.com/bigquery/docs/reference/standard-sql/migrating-from-legacy-sql#differences_in_user-defined_javascript_functions + */ + type IUserDefinedFunctionResource = { + /** + * [Pick one] An inline resource that contains code for a user-defined function (UDF). Providing a inline code resource is equivalent to providing a URI for a file containing the same code. + */ + inlineCode?: string; + /** + * [Pick one] A code resource to load from a Google Cloud Storage URI (gs://bucket/path). + */ + resourceUri?: string; + }; + + type IViewDefinition = { + /** + * [Required] A query that BigQuery executes when the view is referenced. + */ + query?: string; + /** + * Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ Queries and views that reference this view must use the same flag value. + */ + useLegacySql?: boolean; + /** + * Describes user-defined function resources used in the query. + */ + userDefinedFunctionResources?: Array; + }; + + namespace datasets { + /** + * Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name. + */ + type IDeleteParams = { + /** + * If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False + */ + deleteContents?: boolean; + }; + + /** + * Lists all datasets in the specified project to which you have been granted the READER dataset role. + */ + type IListParams = { + /** + * Whether to list all datasets, including hidden ones + */ + all?: boolean; + /** + * An expression for filtering the results of the request by label. The syntax is "labels.[:]". Multiple filters can be ANDed together by connecting with a space. Example: "labels.department:receiving labels.active". 
See Filtering datasets using labels for details. + */ + filter?: string; + /** + * The maximum number of results to return + */ + maxResults?: number; + /** + * Page token, returned by a previous call, to request the next page of results + */ + pageToken?: string; + }; + } + + namespace jobs { + /** + * Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs. + */ + type ICancelParams = { + /** + * The geographic location of the job. Required except for US and EU. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location. + */ + location?: string; + }; + + /** + * Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role. + */ + type IGetParams = { + /** + * The geographic location of the job. Required except for US and EU. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location. + */ + location?: string; + }; + + /** + * Retrieves the results of a query job. + */ + type IGetQueryResultsParams = { + /** + * The geographic location where the job should run. Required except for US and EU. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location. + */ + location?: string; + /** + * Maximum number of results to read + */ + maxResults?: number; + /** + * Page token, returned by a previous call, to request the next page of results + */ + pageToken?: string; + /** + * Zero-based index of the starting row + */ + startIndex?: string; + /** + * How long to wait for the query to complete, in milliseconds, before returning. Default is 10 seconds. If the timeout passes before the job completes, the 'jobComplete' field in the response will be false + */ + timeoutMs?: number; + }; + + /** + * Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property. + */ + type IListParams = { + /** + * Whether to display jobs owned by all users in the project. Default false + */ + allUsers?: boolean; + /** + * Max value for job creation time, in milliseconds since the POSIX epoch. If set, only jobs created before or at this timestamp are returned + */ + maxCreationTime?: string; + /** + * Maximum number of results to return + */ + maxResults?: number; + /** + * Min value for job creation time, in milliseconds since the POSIX epoch. If set, only jobs created after or at this timestamp are returned + */ + minCreationTime?: string; + /** + * Page token, returned by a previous call, to request the next page of results + */ + pageToken?: string; + /** + * If set, retrieves only jobs whose parent is this job. Otherwise, retrieves only jobs which have no parent + */ + parentJobId?: string; + /** + * Restrict information returned to a set of selected fields + */ + projection?: 'full' | 'minimal'; + /** + * Filter for job state + */ + stateFilter?: 'done' | 'pending' | 'running'; + }; + } + + namespace models { + /** + * Lists all models in the specified dataset. Requires the READER dataset role. + */ + type IListParams = { + /** + * The maximum number of results to return in a single response page. 
Leverage the page tokens to iterate through the entire collection. + */ + maxResults?: number; + /** + * Page token, returned by a previous call to request the next page of results + */ + pageToken?: string; + }; + } + + namespace projects { + /** + * Lists all projects to which you have been granted any project role. + */ + type IListParams = { + /** + * Maximum number of results to return + */ + maxResults?: number; + /** + * Page token, returned by a previous call, to request the next page of results + */ + pageToken?: string; + }; + } + + namespace routines { + /** + * Gets the specified routine resource by routine ID. + */ + type IGetParams = { + /** + * If set, only the Routine fields in the field mask are returned in the response. If unset, all Routine fields are returned. + */ + readMask?: string; + }; + + /** + * Lists all routines in the specified dataset. Requires the READER dataset role. + */ + type IListParams = { + /** + * If set, then only the Routines matching this filter are returned. The current supported form is either "routine_type:" or "routineType:", where is a RoutineType enum. Example: "routineType:SCALAR_FUNCTION". + */ + filter?: string; + /** + * The maximum number of results to return in a single response page. Leverage the page tokens to iterate through the entire collection. + */ + maxResults?: number; + /** + * Page token, returned by a previous call, to request the next page of results + */ + pageToken?: string; + /** + * If set, then only the Routine fields in the field mask, as well as project_id, dataset_id and routine_id, are returned in the response. If unset, then the following Routine fields are returned: etag, project_id, dataset_id, routine_id, routine_type, creation_time, last_modified_time, and language. + */ + readMask?: string; + }; + } + + namespace rowAccessPolicies { + /** + * Lists all row access policies on the specified table. + */ + type IListParams = { + /** + * The maximum number of results to return in a single response page. Leverage the page tokens to iterate through the entire collection. + */ + pageSize?: number; + /** + * Page token, returned by a previous call, to request the next page of results. + */ + pageToken?: string; + }; + } + + namespace tabledata { + /** + * Retrieves table data from a specified set of rows. Requires the READER dataset role. + */ + type IListParams = { + /** + * Maximum number of results to return + */ + maxResults?: number; + /** + * Page token, returned by a previous call, identifying the result set + */ + pageToken?: string; + /** + * List of fields to return (comma-separated). If unspecified, all fields are returned + */ + selectedFields?: string; + /** + * Zero-based index of the starting row to read + */ + startIndex?: string; + }; + } + + namespace tables { + /** + * Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table. + */ + type IGetParams = { + /** + * List of fields to return (comma-separated). If unspecified, all fields are returned + */ + selectedFields?: string; + }; + + /** + * Lists all tables in the specified dataset. Requires the READER dataset role. 
+ */ + type IListParams = { + /** + * Maximum number of results to return + */ + maxResults?: number; + /** + * Page token, returned by a previous call, to request the next page of results + */ + pageToken?: string; + }; + } +} + +export default bigquery; diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index 123cdc24..00000000 --- a/synth.metadata +++ /dev/null @@ -1,12 +0,0 @@ -{ - "updateTime": "2019-03-08T00:45:35.854983Z", - "sources": [ - { - "template": { - "name": "node_library", - "origin": "synthtool.gcp", - "version": "2019.1.16" - } - } - ] -} \ No newline at end of file diff --git a/synth.py b/synth.py deleted file mode 100644 index 514ee88f..00000000 --- a/synth.py +++ /dev/null @@ -1,8 +0,0 @@ -import synthtool as s -import synthtool.gcp as gcp -import logging - -logging.basicConfig(level=logging.DEBUG) -common_templates = gcp.CommonTemplates() -templates = common_templates.node_library(source_location='build/src') -s.copy(templates) diff --git a/system-test/bigquery.ts b/system-test/bigquery.ts index 61727a30..2810e899 100644 --- a/system-test/bigquery.ts +++ b/system-test/bigquery.ts @@ -1,34 +1,45 @@ -/** - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
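For context, a minimal sketch of the query flow the system tests below exercise — create a query job, wait for it, then page through its results. This is an illustrative example, not part of the diff: it assumes application-default credentials from the environment and reuses the same public sample table and `labels` option that the tests query.

import {BigQuery} from '@google-cloud/bigquery';

async function runSampleQuery(): Promise<void> {
  const bigquery = new BigQuery();
  // Start an asynchronous query job; the labels are attached to the job,
  // mirroring the "should honor the labels option" test further down.
  const [job] = await bigquery.createQueryJob({
    query: 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100',
    labels: {purpose: 'smoke_test'},
  });
  // Page through the results ten rows at a time.
  const [rows] = await job.getQueryResults({maxResults: 10});
  console.log(`Fetched ${rows.length} rows from job ${job.id}`);
}

runSampleQuery().catch(console.error);
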
import {ApiError} from '@google-cloud/common'; -import {DecorateRequestOptions, GoogleErrorBody} from '@google-cloud/common/build/src/util'; +import { + DecorateRequestOptions, + GoogleErrorBody, +} from '@google-cloud/common/build/src/util'; import {Storage} from '@google-cloud/storage'; import * as assert from 'assert'; +import {describe, it, before, after} from 'mocha'; import Big from 'big.js'; import * as fs from 'fs'; import * as uuid from 'uuid'; -import {BigQuery, Dataset, GetDatasetsOptions, Job, RowMetadata, Table} from '../src'; +import { + BigQuery, + Dataset, + Job, + Model, + RowMetadata, + Routine, + Table, +} from '../src'; const bigquery = new BigQuery(); const storage = new Storage(); describe('BigQuery', () => { const GCLOUD_TESTS_PREFIX = 'nodejs_bq_test'; + const minCreationTime = Date.now().toString(); const dataset = bigquery.dataset(generateName('dataset')); const table = dataset.table(generateName('table')); @@ -37,6 +48,10 @@ describe('BigQuery', () => { const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100'; const SCHEMA = [ + { + name: 'place', + type: 'GEOGRAPHY', + }, { name: 'id', type: 'INTEGER', @@ -125,7 +140,7 @@ describe('BigQuery', () => { let numRequestsMade = 0; const bigquery = new BigQuery(); - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any (bigquery as any).interceptors.push({ request: (reqOpts: DecorateRequestOptions) => { numRequestsMade++; @@ -153,7 +168,7 @@ describe('BigQuery', () => { let numRequestsMade = 0; const bigquery = new BigQuery(); - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any (bigquery as any).interceptors.push({ request: (reqOpts: DecorateRequestOptions) => { numRequestsMade++; @@ -162,16 +177,16 @@ describe('BigQuery', () => { }); return bigquery - .getDatasets({ - maxApiCalls, - }) - .then(() => { - assert.strictEqual(numRequestsMade, maxApiCalls); - }); + .getDatasets({ + maxApiCalls, + }) + .then(() => { + assert.strictEqual(numRequestsMade, maxApiCalls); + }); }); it('should allow for manual pagination in promise mode', async () => { - const [datasets, nextQuery, apiResponse] = await bigquery.getDatasets({ + const [datasets, , apiResponse] = await bigquery.getDatasets({ autoPaginate: false, filter: `labels.${GCLOUD_TESTS_PREFIX}`, }); @@ -182,16 +197,16 @@ describe('BigQuery', () => { it('should list datasets as a stream', done => { let datasetEmitted = false; - bigquery.getDatasetsStream() - .on('error', done) - .on('data', - dataset => { - datasetEmitted = dataset instanceof Dataset; - }) - .on('end', () => { - assert.strictEqual(datasetEmitted, true); - done(); - }); + bigquery + .getDatasetsStream() + .on('error', done) + .on('data', dataset => { + datasetEmitted = dataset instanceof Dataset; + }) + .on('end', () => { + assert.strictEqual(datasetEmitted, true); + done(); + }); }); it('should run a query job, then get results', done => { @@ -211,33 +226,35 @@ describe('BigQuery', () => { it('should run a query job as a promise', () => { let job: Job; - return bigquery.createQueryJob(query) - .then(response => { - job = response[0]; - return job.promise(); - }) - .then(() => { - return job.getQueryResults(); - }) - .then(response => { - const rows = response[0]; - assert.strictEqual(rows!.length, 100); - assert.strictEqual(typeof rows[0].url, 'string'); - }); + return bigquery + .createQueryJob(query) + .then(response => { + job = response[0]; + return job.promise(); + }) + .then(() => { + 
return job.getQueryResults(); + }) + .then(response => { + const rows = response[0]; + assert.strictEqual(rows!.length, 100); + assert.strictEqual(typeof rows[0].url, 'string'); + }); }); it('should get query results as a stream', done => { bigquery.createQueryJob(query, (err, job) => { assert.ifError(err); const rowsEmitted = new Array(); - job!.getQueryResultsStream() - .on('error', done) - .on('data', row => rowsEmitted.push(row)) - .on('end', () => { - assert.strictEqual(rowsEmitted.length, 100); - assert.strictEqual(typeof rowsEmitted[0].url, 'string'); - done(); - }); + job! + .getQueryResultsStream() + .on('error', done) + .on('data', row => rowsEmitted.push(row)) + .on('end', () => { + assert.strictEqual(rowsEmitted.length, 100); + assert.strictEqual(typeof rowsEmitted[0].url, 'string'); + done(); + }); }); }); @@ -285,20 +302,33 @@ describe('BigQuery', () => { }); }); + it('should honor the labels option', done => { + const options = { + query, + labels: {foo: 'bar'}, + }; + + bigquery.createQueryJob(options, (err, _, job) => { + assert.ifError(err); + assert(job!.configuration!.labels); + done(); + }); + }); + it('should query as a stream', done => { let rowsEmitted = 0; - bigquery.createQueryStream(query) - .on('data', - row => { - rowsEmitted++; - assert.strictEqual(typeof row.url, 'string'); - }) - .on('error', done) - .on('end', () => { - assert.strictEqual(rowsEmitted, 100); - done(); - }); + bigquery + .createQueryStream(query) + .on('data', row => { + rowsEmitted++; + assert.strictEqual(typeof row.url, 'string'); + }) + .on('error', done) + .on('end', () => { + assert.strictEqual(rowsEmitted, 100); + done(); + }); }); it('should query', done => { @@ -311,34 +341,37 @@ describe('BigQuery', () => { it('should allow querying in series', done => { bigquery.query( - query, { - maxResults: 10, - }, - (err, rows, nextQuery) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 10); - assert.strictEqual(typeof nextQuery!.pageToken, 'string'); - done(); - }); + query, + { + maxResults: 10, + }, + (err, rows, nextQuery) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 10); + assert.strictEqual(typeof nextQuery!.pageToken, 'string'); + done(); + } + ); }); it('should accept the dryRun option', done => { bigquery.query( - { - query, - dryRun: true, - }, - (err, rows, resp) => { - assert.ifError(err); - assert.deepStrictEqual(rows, []); - // tslint:disable-next-line no-any - assert((resp as any).statistics.query); - done(); - }); + { + query, + dryRun: true, + }, + (err, rows, resp) => { + assert.ifError(err); + assert.deepStrictEqual(rows, []); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + assert((resp as any).statistics.query); + done(); + } + ); }); it('should get a list of jobs', done => { - bigquery.getJobs((err, jobs) => { + bigquery.getJobs({minCreationTime}, (err, jobs) => { assert.ifError(err); assert(jobs![0] instanceof Job); done(); @@ -348,16 +381,16 @@ describe('BigQuery', () => { it('should list jobs as a stream', done => { let jobEmitted = false; - bigquery.getJobsStream() - .on('error', done) - .on('data', - job => { - jobEmitted = job instanceof Job; - }) - .on('end', () => { - assert.strictEqual(jobEmitted, true); - done(); - }); + bigquery + .getJobsStream({minCreationTime}) + .on('error', done) + .on('data', job => { + jobEmitted = job instanceof Job; + }) + .on('end', () => { + assert.strictEqual(jobEmitted, true); + done(); + }); }); it('should cancel a job', done => { @@ -399,11 +432,13 @@ describe('BigQuery', () => { 
it('should error out for bad etags', async () => { await assert.rejects( - () => dataset.setMetadata({ + () => + dataset.setMetadata({ etag: 'a-fake-etag', description: 'oh no!', }), - /precondition/i); + /precondition/i + ); }); it('should get tables', done => { @@ -417,13 +452,14 @@ describe('BigQuery', () => { it('should get tables as a stream', done => { let tableEmitted = false; - dataset.getTablesStream() - .on('error', done) - .on('data', table => tableEmitted = table instanceof Table) - .on('end', () => { - assert.strictEqual(tableEmitted, true); - done(); - }); + dataset + .getTablesStream() + .on('error', done) + .on('data', table => (tableEmitted = table instanceof Table)) + .on('end', () => { + assert.strictEqual(tableEmitted, true); + done(); + }); }); it('should create a Table with a nested schema', async () => { @@ -462,16 +498,18 @@ describe('BigQuery', () => { let job: Job; const QUERY = `SELECT * FROM \`${table.id}\``; + // eslint-disable-next-line @typescript-eslint/no-var-requires const SCHEMA = require('../../system-test/data/schema.json'); - const TEST_DATA_FILE = - require.resolve('../../system-test/data/location-test-data.json'); + const TEST_DATA_FILE = require.resolve( + '../../system-test/data/location-test-data.json' + ); before(async () => { // create a dataset in a certain location will cascade the location // to any jobs created through it await dataset.create(); await table.create({schema: SCHEMA}); - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any job = ((await table.createLoadJob(TEST_DATA_FILE)) as any)[0]; await job.promise(); }); @@ -526,15 +564,16 @@ describe('BigQuery', () => { const badDataset = bigquery.dataset(dataset.id!, {location: 'US'}); badDataset.createQueryJob( - { - query: QUERY, - }, - (e, job) => { - const err = e as {} as GoogleErrorBody; - assert.strictEqual(err.errors![0].reason, 'notFound'); - assert.strictEqual(job!.location, 'US'); - done(); - }); + { + query: QUERY, + }, + (e, job) => { + const err = (e as {}) as GoogleErrorBody; + assert.strictEqual(err.errors![0].reason, 'notFound'); + assert.strictEqual(job!.location, 'US'); + done(); + } + ); }); it('should get query results', async () => { @@ -544,6 +583,14 @@ describe('BigQuery', () => { const [rows] = await job.getQueryResults(); assert(rows!.length > 0); }); + + it('should return error if timeout passed', async () => { + const [job] = await dataset.createQueryJob(QUERY); + assert.strictEqual(job.location, LOCATION); + await job.promise(); + const options = {timeoutMs: 1}; + await job.getQueryResults(options); + }); }); describe('job.insert', () => { @@ -586,7 +633,7 @@ describe('BigQuery', () => { }); }); - it('should extract the table', () => { + it('should extract a table', () => { return table.createExtractJob(extractFile).then(data => { const job = data[0]; @@ -599,9 +646,146 @@ describe('BigQuery', () => { }); }); + describe('BigQuery/Model', () => { + let model: Model; + const bucket = storage.bucket(generateName('bucket')); + const extractDest = + 'gs://' + bucket.name + '/' + generateName('model-export'); + + before(async () => { + await bucket.create(); + + model = dataset.model('testmodel'); + return bigquery.query(` + CREATE MODEL \`${dataset.id}.${model.id}\` + OPTIONS ( + model_type='linear_reg', + max_iterations=1, + learn_rate=0.4, + learn_rate_strategy='constant' + ) AS ( + SELECT 'a' AS f1, 2.0 AS label + UNION ALL + SELECT 'b' AS f2, 3.8 AS label + ) + `); + }); + + after(() => model.delete()); + + 
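The before hook above trains a BigQuery ML model with plain CREATE MODEL DDL, and the tests that follow exercise the Model class. Outside the test harness the same flow might be written roughly as below; this is a sketch that assumes a hypothetical dataset, model ID, and bucket, not code from this patch.

```ts
// Illustrative only: train a BQML model via DDL, read its metadata, and
// export the trained artifacts to Cloud Storage.
import {BigQuery} from '@google-cloud/bigquery';

async function trainAndExportModel(datasetId: string, bucketName: string) {
  const bigquery = new BigQuery();
  const model = bigquery.dataset(datasetId).model('example_model'); // hypothetical ID

  // CREATE MODEL runs through the ordinary query path.
  await bigquery.query(`
    CREATE MODEL \`${datasetId}.example_model\`
    OPTIONS (model_type='linear_reg', max_iterations=1) AS (
      SELECT 'a' AS f1, 2.0 AS label
      UNION ALL
      SELECT 'b' AS f1, 3.8 AS label
    )
  `);

  const [metadata] = await model.getMetadata();
  console.log(metadata.modelType);

  // Export the model, then wait for the extract job to finish.
  const [job] = await model.createExtractJob(`gs://${bucketName}/example-model`);
  await job.promise();
}
```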
it('should get a list of models', async () => { + const [models] = await dataset.getModels(); + assert.strictEqual(models.length, 1); + assert.ok(models[0] instanceof Model); + }); + + it('should check if a model exists', async () => { + const [exists] = await model.exists(); + assert.ok(exists); + }); + + it('should get a model', async () => { + const [model2] = await model.get(); + assert.deepStrictEqual(model, model2); + }); + + it('should get a model metadata', async () => { + const [metadata] = await model.getMetadata(); + assert.deepStrictEqual(metadata, model.metadata); + }); + + it('should set model metadata', async () => { + const friendlyName = 'modelfriend'; + await model.setMetadata({friendlyName}); + const [metadata] = await model.getMetadata(); + assert.strictEqual(metadata.friendlyName, friendlyName); + }); + + it('should extract a model', async () => { + const jobId = generateName('model-export-job'); + + const [job] = await model.createExtractJob(extractDest, {jobId}); + assert.strictEqual(job.id, jobId); + }); + }); + + describe('BigQuery/Routine', () => { + before(() => { + const routineId = `${bigquery.projectId}.${dataset.id}.my_ddl_routine`; + + return bigquery.query(` + CREATE FUNCTION \`${routineId}\`( + arr ARRAY> + ) AS ( + (SELECT SUM(IF(elem.name = "foo",elem.val,null)) FROM UNNEST(arr) AS elem) + ) + `); + }); + + after(async () => { + const [routines] = await dataset.getRoutines(); + return Promise.all(routines.map(routine => routine.delete())); + }); + + it('should create a routine via insert', () => { + return dataset.createRoutine('my_routine', { + arguments: [ + { + name: 'x', + dataType: { + typeKind: 'INT64', + }, + }, + ], + definitionBody: 'x * 3', + routineType: 'SCALAR_FUNCTION', + returnType: { + typeKind: 'INT64', + }, + }); + }); + + it('should list all the routines', async () => { + const [routines] = await dataset.getRoutines(); + assert.ok(routines.length > 0); + assert.ok(routines[0] instanceof Routine); + }); + + it('should get the routines as a stream', done => { + const routines: Routine[] = []; + + dataset + .getRoutinesStream() + .on('error', done) + .on('data', routine => { + routines.push(routine); + }) + .on('end', () => { + assert.ok(routines.length > 0); + assert.ok(routines[0] instanceof Routine); + done(); + }); + }); + + it('should check to see if a routine exists', async () => { + const routine = dataset.routine('my_ddl_routine'); + const [exists] = await routine.exists(); + assert.ok(exists); + }); + + it('should update an existing routine', async () => { + const routine = dataset.routine('my_ddl_routine'); + const description = 'A routine!'; + + await routine.setMetadata({description}); + assert.strictEqual(routine.metadata.description, description); + }); + }); + describe('BigQuery/Table', () => { - const TEST_DATA_JSON_PATH = - require.resolve('../../system-test/data/kitten-test-data.json'); + const TEST_DATA_JSON_PATH = require.resolve( + '../../system-test/data/kitten-test-data.json' + ); it('should have created the correct schema', () => { assert.deepStrictEqual(table.metadata.schema.fields, SCHEMA); @@ -616,26 +800,26 @@ describe('BigQuery', () => { }); it('should get the rows in a table via stream', done => { - table.createReadStream() - .on('error', done) - .on('data', () => {}) - .on('end', done); + table + .createReadStream() + .on('error', done) + .on('data', () => {}) + .on('end', done); }); it('should insert rows via stream', done => { let job: Job; fs.createReadStream(TEST_DATA_JSON_PATH) - 
.pipe(table.createWriteStream('json')) - .on('error', done) - .on('complete', - _job => { - job = _job; - }) - .on('finish', () => { - assert.strictEqual(job.metadata.status.state, 'DONE'); - done(); - }); + .pipe(table.createWriteStream('json')) + .on('error', done) + .on('complete', _job => { + job = _job; + }) + .on('finish', () => { + assert.strictEqual(job.metadata.status.state, 'DONE'); + done(); + }); }); it('should insert rows with null values', () => { @@ -657,19 +841,18 @@ describe('BigQuery', () => { data?: {tableId?: number}; table: Table; } - const TABLES = [ - {data: {tableId: 1}}, - {}, - ] as {} as TableItem[]; + const TABLES = ([{data: {tableId: 1}}, {}] as {}) as TableItem[]; const SCHEMA = 'tableId:integer'; before(async () => { - TABLES.forEach(t => t.table = dataset.table(generateName('table'))); - await Promise.all(TABLES.map(tableItem => { - const tableInstance = tableItem.table; - return tableInstance!.create({schema: SCHEMA}); - })); + TABLES.forEach(t => (t.table = dataset.table(generateName('table')))); + await Promise.all( + TABLES.map(tableItem => { + const tableInstance = tableItem.table; + return tableInstance!.create({schema: SCHEMA}); + }) + ); const table1Instance = TABLES[0].table; await table1Instance.insert(TABLES[0].data); }); @@ -702,7 +885,7 @@ describe('BigQuery', () => { table1Instance.copy(table2Instance, (err, resp) => { assert.ifError(err); - assert.strictEqual(resp.status.state, 'DONE'); + assert.strictEqual(resp!.status!.state, 'DONE'); done(); }); }); @@ -735,7 +918,7 @@ describe('BigQuery', () => { table2Instance.copyFrom(table1Instance, (err, resp) => { assert.ifError(err); - assert.strictEqual(resp.status.state, 'DONE'); + assert.strictEqual(resp!.status!.state, 'DONE'); done(); }); }); @@ -746,9 +929,9 @@ describe('BigQuery', () => { before(done => { fs.createReadStream(TEST_DATA_JSON_PATH) - .pipe(file.createWriteStream({})) - .on('error', done) - .on('finish', done); + .pipe(file.createWriteStream({})) + .on('error', done) + .on('finish', done); }); after(done => { @@ -767,7 +950,7 @@ describe('BigQuery', () => { it('should load data from a storage file', done => { table.load(file, (err, resp) => { assert.ifError(err); - assert.strictEqual(resp.status.state, 'DONE'); + assert.strictEqual(resp!.status!.state, 'DONE'); done(); }); }); @@ -775,7 +958,7 @@ describe('BigQuery', () => { it('should load data from a file via promises', () => { return table.load(file).then(results => { const metadata = results[0]; - assert.strictEqual(metadata.status.state, 'DONE'); + assert.strictEqual(metadata!.status!.state, 'DONE'); }); }); @@ -789,14 +972,14 @@ describe('BigQuery', () => { }; table.insert([data, improperData], e => { - const err = e as {} as GoogleErrorBody; - // tslint:disable-next-line no-any + const err = (e as {}) as GoogleErrorBody; + // eslint-disable-next-line @typescript-eslint/no-explicit-any assert.strictEqual((err as any).name, 'PartialFailureError'); assert.deepStrictEqual(err.errors![0], { errors: [ { - message: 'Conversion from bool to string is unsupported.', + message: 'Conversion from bool to std::string is unsupported.', reason: 'invalid', }, ], @@ -825,457 +1008,521 @@ describe('BigQuery', () => { }; const options = { - autoCreate: true, schema: SCHEMA, }; - return table.insert(row, options) - .then(() => { - // getting rows immediately after insert - // results in an empty array - return new Promise(resolve => { - setTimeout(resolve, 2500); - }); - }) - .then(() => { - return table.getRows(); - }) - .then(data => { - 
const rows = data[0]; - - assert.strictEqual(rows!.length, 1); - assert.strictEqual(rows[0].name, row.name); + return table + .insert(row, options) + .then(() => { + // getting rows immediately after insert + // results in an empty array + return new Promise(resolve => { + setTimeout(resolve, 2500); }); + }) + .then(() => { + return table.getRows(); + }) + .then(data => { + const rows = data[0]; + + assert.strictEqual(rows!.length, 1); + assert.strictEqual(rows[0].name, row.name); + }); }); describe('SQL parameters', () => { describe('positional', () => { it('should work with strings', done => { bigquery.query( - { - query: [ - 'SELECT url', - 'FROM `publicdata.samples.github_nested`', - 'WHERE repository.owner = ?', - 'LIMIT 1', - ].join(' '), - params: ['google'], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: [ + 'SELECT url', + 'FROM `publicdata.samples.github_nested`', + 'WHERE repository.owner = ?', + 'LIMIT 1', + ].join(' '), + params: ['google'], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with ints', done => { bigquery.query( - { - query: [ - 'SELECT url', - 'FROM `publicdata.samples.github_nested`', - 'WHERE repository.forks > ?', - 'LIMIT 1', - ].join(' '), - params: [1], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: [ + 'SELECT url', + 'FROM `publicdata.samples.github_nested`', + 'WHERE repository.forks > ?', + 'LIMIT 1', + ].join(' '), + params: [1], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with floats', done => { bigquery.query( - { - query: [ - 'SELECT snow_depth', - 'FROM `publicdata.samples.gsod`', - 'WHERE snow_depth >= ?', - 'LIMIT 1', - ].join(' '), - params: [12.5], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: [ + 'SELECT snow_depth', + 'FROM `publicdata.samples.gsod`', + 'WHERE snow_depth >= ?', + 'LIMIT 1', + ].join(' '), + params: [12.5], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with numerics', done => { bigquery.query( - { - query: [ - 'SELECT x', - 'FROM UNNEST([NUMERIC "1", NUMERIC "2", NUMERIC "3"]) x', - 'WHERE x = ?', - ].join(' '), - params: [new Big('2')], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: [ + 'SELECT x', + 'FROM UNNEST([NUMERIC "1", NUMERIC "2", NUMERIC "3"]) x', + 'WHERE x = ?', + ].join(' '), + params: [new Big('2')], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with booleans', done => { bigquery.query( - { - query: [ - 'SELECT url', - 'FROM `publicdata.samples.github_nested`', - 'WHERE public = ?', - 'LIMIT 1', - ].join(' '), - params: [true], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: [ + 'SELECT url', + 'FROM `publicdata.samples.github_nested`', + 'WHERE public = ?', + 'LIMIT 1', + ].join(' '), + params: [true], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with arrays', done => { bigquery.query( - { - query: 'SELECT * FROM UNNEST (?)', - params: [[25, 26, 27, 28, 29]], - }, - (err, rows) => { - 
assert.ifError(err); - assert.strictEqual(rows!.length, 5); - done(); - }); + { + query: 'SELECT * FROM UNNEST (?)', + params: [[25, 26, 27, 28, 29]], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 5); + done(); + } + ); + }); + + it('should work with empty arrays', async () => { + const [rows] = await bigquery.query({ + query: 'SELECT * FROM UNNEST (?)', + params: [[]], + types: [['INT64']], + }); + assert.strictEqual(rows.length, 0); }); it('should work with structs', done => { bigquery.query( - { - query: 'SELECT ? obj', - params: [ - { - b: true, - arr: [2, 3, 4], - d: bigquery.date('2016-12-7'), - f: 3.14, - nested: { - a: 3, - }, + { + query: 'SELECT ? obj', + params: [ + { + b: true, + arr: [2, 3, 4], + d: bigquery.date('2016-12-7'), + f: 3.14, + nested: { + a: 3, }, - ], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + ], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with TIMESTAMP types', done => { bigquery.query( - { - query: [ - 'SELECT subject', - 'FROM `bigquery-public-data.github_repos.commits`', - 'WHERE author.date < ?', - 'LIMIT 1', - ].join(' '), - params: [new Date()], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: 'SELECT ? timestamp', + params: [new Date()], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with DATE types', done => { bigquery.query( - { - query: 'SELECT ? date', - params: [bigquery.date('2016-12-7')], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: 'SELECT ? date', + params: [bigquery.date('2016-12-7')], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with DATETIME types', done => { bigquery.query( - { - query: 'SELECT ? datetime', - params: [bigquery.datetime('2016-12-7 14:00:00')], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: 'SELECT ? datetime', + params: [bigquery.datetime('2016-12-7 14:00:00')], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with TIME types', done => { bigquery.query( - { - query: 'SELECT ? time', - params: [bigquery.time('14:00:00')], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: 'SELECT ? time', + params: [bigquery.time('14:00:00')], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); + }); + + it('should work with GEOGRAPHY types', done => { + bigquery.query( + { + query: 'SELECT ? 
geography', + params: [bigquery.geography('POINT(1 2)')], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with multiple types', done => { bigquery.query( - { - query: [ - 'SELECT url FROM `publicdata.samples.github_nested`', - 'WHERE repository.owner = ?', - 'AND repository.forks > ?', - 'AND public = ?', - 'LIMIT 1', - ].join(' '), - params: ['google', 1, true], - }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + { + query: [ + 'SELECT url FROM `publicdata.samples.github_nested`', + 'WHERE repository.owner = ?', + 'AND repository.forks > ?', + 'AND public = ?', + 'LIMIT 1', + ].join(' '), + params: ['google', 1, true], + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); }); describe('named', () => { it('should work with strings', done => { bigquery.query( - { - query: [ - 'SELECT url FROM `publicdata.samples.github_nested`', - 'WHERE repository.owner = @owner', - 'LIMIT 1', - ].join(' '), - params: { - owner: 'google', - }, + { + query: [ + 'SELECT url FROM `publicdata.samples.github_nested`', + 'WHERE repository.owner = @owner', + 'LIMIT 1', + ].join(' '), + params: { + owner: 'google', }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with ints', done => { bigquery.query( - { - query: [ - 'SELECT url', - 'FROM `publicdata.samples.github_nested`', - 'WHERE repository.forks > @forks', - 'LIMIT 1', - ].join(' '), - params: { - forks: 1, - }, + { + query: [ + 'SELECT url', + 'FROM `publicdata.samples.github_nested`', + 'WHERE repository.forks > @forks', + 'LIMIT 1', + ].join(' '), + params: { + forks: 1, }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with floats', done => { bigquery.query( - { - query: [ - 'SELECT snow_depth', - 'FROM `publicdata.samples.gsod`', - 'WHERE snow_depth >= @depth', - 'LIMIT 1', - ].join(' '), - params: { - depth: 12.5, - }, + { + query: [ + 'SELECT snow_depth', + 'FROM `publicdata.samples.gsod`', + 'WHERE snow_depth >= @depth', + 'LIMIT 1', + ].join(' '), + params: { + depth: 12.5, }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with numerics', done => { bigquery.query( - { - query: [ - 'SELECT x', - 'FROM UNNEST([NUMERIC "1", NUMERIC "2", NUMERIC "3"]) x', - 'WHERE x = @num', - ].join(' '), - params: { - num: new Big('2'), - }, + { + query: [ + 'SELECT x', + 'FROM UNNEST([NUMERIC "1", NUMERIC "2", NUMERIC "3"]) x', + 'WHERE x = @num', + ].join(' '), + params: { + num: new Big('2'), }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with booleans', done => { bigquery.query( - { - query: [ - 'SELECT url', - 'FROM `publicdata.samples.github_nested`', - 'WHERE public = @isPublic', - 'LIMIT 1', - ].join(' '), - params: { - isPublic: true, - }, + { + query: [ + 'SELECT url', + 'FROM 
`publicdata.samples.github_nested`', + 'WHERE public = @isPublic', + 'LIMIT 1', + ].join(' '), + params: { + isPublic: true, }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with arrays', done => { bigquery.query( - { - query: 'SELECT * FROM UNNEST (@nums)', - params: { - nums: [25, 26, 27, 28, 29], - }, + { + query: 'SELECT * FROM UNNEST (@nums)', + params: { + nums: [25, 26, 27, 28, 29], }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 5); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 5); + done(); + } + ); + }); + + it('should work with empty arrays', async () => { + const [rows] = await bigquery.query({ + query: 'SELECT * FROM UNNEST (@nums)', + params: { + nums: [], + }, + types: {nums: ['INT64']}, + }); + assert.strictEqual(rows.length, 0); }); it('should work with structs', done => { bigquery.query( - { - query: 'SELECT @obj obj', - params: { - obj: { - b: true, - arr: [2, 3, 4], - d: bigquery.date('2016-12-7'), - f: 3.14, - nested: { - a: 3, - }, + { + query: 'SELECT @obj obj', + params: { + obj: { + b: true, + arr: [2, 3, 4], + d: bigquery.date('2016-12-7'), + f: 3.14, + nested: { + a: 3, }, }, }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with TIMESTAMP types', done => { bigquery.query( - { - query: [ - 'SELECT subject', - 'FROM `bigquery-public-data.github_repos.commits`', - 'WHERE author.date < @time', - 'LIMIT 1', - ].join(' '), - params: { - time: new Date(), - }, + { + query: 'SELECT @time timestamp', + params: { + time: new Date(), }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with DATE types', done => { bigquery.query( - { - query: 'SELECT @date date', - params: { - date: bigquery.date('2016-12-7'), - }, + { + query: 'SELECT @date date', + params: { + date: bigquery.date('2016-12-7'), }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with DATETIME types', done => { bigquery.query( - { - query: 'SELECT @datetime datetime', - params: { - datetime: bigquery.datetime('2016-12-7 14:00:00'), - }, + { + query: 'SELECT @datetime datetime', + params: { + datetime: bigquery.datetime('2016-12-7 14:00:00'), }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with TIME types', done => { bigquery.query( - { - query: 'SELECT @time time', - params: { - time: bigquery.time('14:00:00'), - }, + { + query: 'SELECT @time time', + params: { + time: bigquery.time('14:00:00'), + }, + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); + }); + + it('should work with GEOGRAPHY types', done => { + bigquery.query( + { + query: 'SELECT @place geography', + params: { + place: bigquery.geography('POINT(1 
2)'), }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); it('should work with multiple types', done => { bigquery.query( - { - query: [ - 'SELECT url', - 'FROM `publicdata.samples.github_nested`', - 'WHERE repository.owner = @owner', - 'AND repository.forks > @forks', - 'AND public = @isPublic', - 'LIMIT 1', - ].join(' '), - params: { - owner: 'google', - forks: 1, - isPublic: true, - }, + { + query: [ + 'SELECT url', + 'FROM `publicdata.samples.github_nested`', + 'WHERE repository.owner = @owner', + 'AND repository.forks > @forks', + 'AND public = @isPublic', + 'LIMIT 1', + ].join(' '), + params: { + owner: 'google', + forks: 1, + isPublic: true, }, - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows!.length, 1); - done(); - }); + }, + (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows!.length, 1); + done(); + } + ); }); }); }); @@ -1294,7 +1541,7 @@ describe('BigQuery', () => { const file = bucket.file('kitten-test-data-backup.json'); table.extract(file, (err, resp) => { assert.ifError(err); - assert.strictEqual(resp.status.state, 'DONE'); + assert.strictEqual(resp!.status!.state, 'DONE'); done(); }); }); @@ -1309,6 +1556,7 @@ describe('BigQuery', () => { const TIME = bigquery.time('14:00:00'); const TIMESTAMP = bigquery.timestamp(new Date()); const NUMERIC = new Big('123.456'); + const GEOGRAPHY = bigquery.geography('POINT(1 2)'); before(() => { table = dataset.table(generateName('table')); @@ -1319,6 +1567,7 @@ describe('BigQuery', () => { 'time:TIME', 'timestamp:TIMESTAMP', 'numeric:NUMERIC', + 'geography:GEOGRAPHY', ].join(', '), }); }); @@ -1330,13 +1579,16 @@ describe('BigQuery', () => { time: TIME, timestamp: TIMESTAMP, numeric: NUMERIC, + geography: GEOGRAPHY, }); }); }); describe('Provided Tests', () => { const table = dataset.table(generateName('table')); + // eslint-disable-next-line @typescript-eslint/no-var-requires const schema = require('../../system-test/data/schema.json'); + // eslint-disable-next-line @typescript-eslint/no-var-requires const testData = require('../../system-test/data/schema-test-data.json'); const EXPECTED_ROWS = { @@ -1431,8 +1683,9 @@ describe('BigQuery', () => { } rows!.forEach(row => { - const expectedRow = - (EXPECTED_ROWS as {[index: string]: {}})[row.Name]; + const expectedRow = (EXPECTED_ROWS as {[index: string]: {}})[ + row.Name + ]; assert.deepStrictEqual(row, expectedRow); }); @@ -1443,12 +1696,14 @@ describe('BigQuery', () => { function generateName(resourceType: string) { return `${GCLOUD_TESTS_PREFIX}_${resourceType}_${uuid.v1()}`.replace( - /-/g, '_'); + /-/g, + '_' + ); } // Only delete a resource if it is older than 24 hours. That will prevent // collisions with parallel CI test runs. 
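The parameter-binding tests above cover positional (?) and named (@name) parameters, and the new empty-array cases show why the types option exists: an empty array gives the client nothing from which to infer an element type. A minimal sketch of the named form, assuming a hypothetical table, follows; the original stale-resource helper continues after it.

```ts
// Illustrative only: named parameters plus an explicit type for an empty array,
// and a GEOGRAPHY parameter built with bigquery.geography().
import {BigQuery} from '@google-cloud/bigquery';

async function runParameterizedQuery() {
  const bigquery = new BigQuery();

  const [rows] = await bigquery.query({
    query: `
      SELECT name, location
      FROM \`my_dataset.my_table\`              -- hypothetical table
      WHERE name IN UNNEST(@names)
        AND ST_EQUALS(location, @place)`,
    params: {
      names: [],                                // empty, so the type must be declared
      place: bigquery.geography('POINT(1 2)'),
    },
    types: {
      names: ['STRING'],                        // i.e. ARRAY<STRING>
    },
  });

  return rows;
}
```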
- function isResourceStale(creationTime: number|string) { + function isResourceStale(creationTime: number | string) { const oneDayMs = 86400000; const now = new Date(); const created = new Date(creationTime); @@ -1460,13 +1715,13 @@ describe('BigQuery', () => { prefix: GCLOUD_TESTS_PREFIX, }); - const deleteBucketPromises = - buckets.filter(bucket => isResourceStale(bucket.metadata.timeCreated)) - .map(async b => { - const [files] = await b.getFiles(); - await Promise.all(files.map(f => f.delete())); - await b.delete(); - }); + const deleteBucketPromises = buckets + .filter(bucket => isResourceStale(bucket.metadata.timeCreated)) + .map(async b => { + const [files] = await b.getFiles(); + await Promise.all(files.map(f => f.delete())); + await b.delete(); + }); await Promise.all(deleteBucketPromises); } @@ -1476,16 +1731,18 @@ describe('BigQuery', () => { filter: `labels.${GCLOUD_TESTS_PREFIX}`, }); - const deleteDatasetPromises = - datasets - .filter(dataset => { - const creationTime = dataset.metadata.creationTime; - return creationTime && isResourceStale(creationTime); - }) - .map(dataset => { - return dataset.delete({force: true}); - }); + for (const dataset of datasets) { + const [metadata] = await dataset.getMetadata(); + const creationTime = Number(metadata.creationTime); - await Promise.all(deleteDatasetPromises); + if (isResourceStale(creationTime)) { + try { + await dataset.delete({force: true}); + } catch (e) { + console.log(`dataset(${dataset.id}).delete() failed`); + console.log(e); + } + } + } } }); diff --git a/system-test/fixtures/kitchen/package.json b/system-test/fixtures/kitchen/package.json index cb1defa0..b48091aa 100644 --- a/system-test/fixtures/kitchen/package.json +++ b/system-test/fixtures/kitchen/package.json @@ -16,8 +16,8 @@ "@google-cloud/bigquery": "file:./google-cloud-bigquery.tgz" }, "devDependencies": { - "@types/node": "^10.3.0", + "@types/node": "^14.0.0", "typescript": "^3.0.0", - "gts": "^0.9.0" + "gts": "^2.0.0" } } diff --git a/system-test/fixtures/kitchen/src/index.ts b/system-test/fixtures/kitchen/src/index.ts index 8e06b4ac..00523547 100644 --- a/system-test/fixtures/kitchen/src/index.ts +++ b/system-test/fixtures/kitchen/src/index.ts @@ -1,2 +1,3 @@ import {BigQuery} from '@google-cloud/bigquery'; const bq = new BigQuery(); +console.log(bq); diff --git a/system-test/install.ts b/system-test/install.ts index b2f5d520..d16339ae 100644 --- a/system-test/install.ts +++ b/system-test/install.ts @@ -1,31 +1,31 @@ -/** - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. import * as execa from 'execa'; import * as mv from 'mv'; import {ncp} from 'ncp'; import * as tmp from 'tmp'; import {promisify} from 'util'; +import {describe, it, before, after} from 'mocha'; const RUNNING_IN_VPCSC = !!process.env['GOOGLE_CLOUD_TESTS_IN_VPCSC']; -const mvp = promisify(mv) as {} as (...args: string[]) => Promise; +const mvp = (promisify(mv) as {}) as (...args: string[]) => Promise; const ncpp = promisify(ncp); const stagingDir = tmp.dirSync({unsafeCleanup: true}); const stagingPath = stagingDir.name; +// eslint-disable-next-line @typescript-eslint/no-var-requires const pkg = require('../../package.json'); describe('Installation test', () => { diff --git a/test/.eslintrc.yml b/test/.eslintrc.yml deleted file mode 100644 index 73f7bbc9..00000000 --- a/test/.eslintrc.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -env: - mocha: true -rules: - node/no-unpublished-require: off diff --git a/test/index.ts b/test/bigquery.ts similarity index 50% rename from test/index.ts rename to test/bigquery.ts index 8f6c6e73..5bfefbc9 100644 --- a/test/index.ts +++ b/test/bigquery.ts @@ -1,44 +1,67 @@ -/** - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {DecorateRequestOptions, Service, ServiceConfig, ServiceOptions, util} from '@google-cloud/common'; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
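The cleanup helpers in the system tests above delete only resources that carry the test label and are more than a day old, so parallel CI runs do not race each other. Factored out of the test file, that pattern looks roughly like the sketch below; the prefix and the 24-hour threshold are assumptions copied from the tests, not part of this patch.

```ts
// Illustrative only: delete labelled test datasets that are older than 24 hours.
import {BigQuery} from '@google-cloud/bigquery';

const TEST_PREFIX = 'nodejs_bq_test';
const ONE_DAY_MS = 86400000;

async function deleteStaleTestDatasets() {
  const bigquery = new BigQuery();
  // Only datasets labelled by the test runs are candidates for deletion.
  const [datasets] = await bigquery.getDatasets({
    filter: `labels.${TEST_PREFIX}`,
  });

  for (const dataset of datasets) {
    const [metadata] = await dataset.getMetadata();
    if (Date.now() - Number(metadata.creationTime) > ONE_DAY_MS) {
      // force: true removes the dataset even if it still contains tables.
      await dataset.delete({force: true});
    }
  }
}
```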
+ +import { + DecorateRequestOptions, + Service, + ServiceConfig, + ServiceOptions, + util, +} from '@google-cloud/common'; import * as pfy from '@google-cloud/promisify'; -import * as arrify from 'arrify'; +import arrify = require('arrify'); import * as assert from 'assert'; +import {describe, it, after, afterEach, before, beforeEach} from 'mocha'; import Big from 'big.js'; import * as extend from 'extend'; import * as proxyquire from 'proxyquire'; import * as sinon from 'sinon'; import * as uuid from 'uuid'; -import {BigQueryDate, Dataset, Job, Query, Table} from '../src'; -import {JobOptions} from '../src/job'; -import {TableField} from '../src/table'; +import { + BigQueryInt, + BigQueryDate, + IntegerTypeCastValue, + IntegerTypeCastOptions, + Dataset, + Job, + PROTOCOL_REGEX, + Table, + JobOptions, + TableField, +} from '../src'; +import {SinonStub} from 'sinon'; const fakeUuid = extend(true, {}, uuid); class FakeApiError { - calledWith_: IArguments; - constructor() { - this.calledWith_ = arguments; + calledWith_: Array<{}>; + constructor(...args: Array<{}>) { + this.calledWith_ = args; } } +interface CalledWithService extends Service { + calledWith_: Array<{ + baseUrl: string; + scopes: string[]; + packageJson: {}; + }>; +} + let promisified = false; -const fakePfy = extend({}, pfy, { +const fakePfy = Object.assign({}, pfy, { promisifyAll: (c: Function, options: pfy.PromisifyAllOptions) => { if (c.name !== 'BigQuery') { return; @@ -48,21 +71,23 @@ const fakePfy = extend({}, pfy, { 'dataset', 'date', 'datetime', + 'geography', + 'int', 'job', 'time', 'timestamp', ]); }, }); -const fakeUtil = extend({}, util, { +const fakeUtil = Object.assign({}, util, { ApiError: FakeApiError, }); const originalFakeUtil = extend(true, {}, fakeUtil); class FakeDataset { - calledWith_: IArguments; - constructor() { - this.calledWith_ = arguments; + calledWith_: Array<{}>; + constructor(...args: Array<{}>) { + this.calledWith_ = args; } } @@ -73,9 +98,9 @@ class FakeTable extends Table { } class FakeJob { - calledWith_: IArguments; - constructor() { - this.calledWith_ = arguments; + calledWith_: Array<{}>; + constructor(...args: Array<{}>) { + this.calledWith_ = args; } } @@ -94,13 +119,14 @@ const fakePaginator = { streamify: (methodName: string) => { return methodName; }, - } + }, }; class FakeService extends Service { calledWith_: IArguments; constructor(config: ServiceConfig, options: ServiceOptions) { super(config, options); + // eslint-disable-next-line prefer-rest-params this.calledWith_ = arguments; } } @@ -113,44 +139,53 @@ describe('BigQuery', () => { const PROJECT_ID = 'test-project'; const LOCATION = 'asia-northeast1'; - // tslint:disable-next-line no-any variable-name + // eslint-disable-next-line @typescript-eslint/no-explicit-any let BigQueryCached: any; - // tslint:disable-next-line no-any variable-name + // eslint-disable-next-line @typescript-eslint/no-explicit-any let BigQuery: any; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let bq: any; + const BIGQUERY_EMULATOR_HOST = process.env.BIGQUERY_EMULATOR_HOST; + before(() => { - BigQuery = proxyquire('../src', { - uuid: fakeUuid, - './dataset': { - Dataset: FakeDataset, - }, - './job': { - Job: FakeJob, - }, - './table': { - Table: FakeTable, - }, - '@google-cloud/common': { - Service: FakeService, - util: fakeUtil, - }, - '@google-cloud/paginator': fakePaginator, - '@google-cloud/promisify': fakePfy, - }).BigQuery; - BigQueryCached = extend({}, BigQuery); + delete 
process.env.BIGQUERY_EMULATOR_HOST; + BigQuery = proxyquire('../src/bigquery', { + uuid: fakeUuid, + './dataset': { + Dataset: FakeDataset, + }, + './job': { + Job: FakeJob, + }, + './table': { + Table: FakeTable, + }, + '@google-cloud/common': { + Service: FakeService, + util: fakeUtil, + }, + '@google-cloud/paginator': fakePaginator, + '@google-cloud/promisify': fakePfy, + }).BigQuery; + BigQueryCached = Object.assign({}, BigQuery); }); beforeEach(() => { - extend(fakeUtil, originalFakeUtil); - BigQuery = extend(BigQuery, BigQueryCached); + Object.assign(fakeUtil, originalFakeUtil); + BigQuery = Object.assign(BigQuery, BigQueryCached); bq = new BigQuery({projectId: PROJECT_ID}); }); + after(() => { + if (BIGQUERY_EMULATOR_HOST) { + process.env.BIGQUERY_EMULATOR_HOST = BIGQUERY_EMULATOR_HOST; + } + }); + describe('instantiation', () => { it('should extend the correct methods', () => { - assert(extended); // See `fakePaginator.extend` + assert(extended); // See `fakePaginator.extend` }); it('should streamify the correct methods', () => { @@ -166,15 +201,54 @@ describe('BigQuery', () => { it('should inherit from Service', () => { assert(bq instanceof Service); - const calledWith = bq.calledWith_[0]; + const calledWith = (bq as CalledWithService).calledWith_[0]; - const baseUrl = 'https://www.googleapis.com/bigquery/v2'; + const baseUrl = 'https://bigquery.googleapis.com/bigquery/v2'; assert.strictEqual(calledWith.baseUrl, baseUrl); assert.deepStrictEqual(calledWith.scopes, [ 'https://www.googleapis.com/auth/bigquery', ]); assert.deepStrictEqual( - calledWith.packageJson, require('../../package.json')); + calledWith.packageJson, + // eslint-disable-next-line @typescript-eslint/no-var-requires + require('../../package.json') + ); + }); + + it('should allow overriding the apiEndpoint', () => { + const apiEndpoint = 'https://not.real.local'; + bq = new BigQuery({ + apiEndpoint, + }); + const calledWith = bq.calledWith_[0]; + assert.strictEqual(calledWith.baseUrl, `${apiEndpoint}/bigquery/v2`); + assert.strictEqual(calledWith.apiEndpoint, `${apiEndpoint}`); + }); + + it('should prepend apiEndpoint with default protocol', () => { + const protocollessApiEndpoint = 'some.fake.endpoint'; + bq = new BigQuery({ + apiEndpoint: protocollessApiEndpoint, + }); + const calledWith = bq.calledWith_[0]; + assert.strictEqual( + calledWith.baseUrl, + `https://${protocollessApiEndpoint}/bigquery/v2` + ); + assert.strictEqual( + calledWith.apiEndpoint, + `https://${protocollessApiEndpoint}` + ); + }); + + it('should strip trailing slash from apiEndpoint', () => { + const apiEndpoint = 'https://some.fake.endpoint/'; + bq = new BigQuery({ + apiEndpoint: apiEndpoint, + }); + const calledWith = bq.calledWith_[0]; + assert.strictEqual(calledWith.baseUrl, `${apiEndpoint}bigquery/v2`); + assert.strictEqual(calledWith.apiEndpoint, 'https://some.fake.endpoint'); }); it('should capture any user specified location', () => { @@ -182,7 +256,6 @@ describe('BigQuery', () => { projectId: PROJECT_ID, location: LOCATION, }); - assert.strictEqual(bq.location, LOCATION); }); @@ -199,6 +272,93 @@ describe('BigQuery', () => { const calledWith = bq.calledWith_[0]; assert.deepStrictEqual(calledWith.scopes, expectedScopes); }); + + it('should not modify options argument', () => { + const options = { + projectId: PROJECT_ID, + }; + const expectedCalledWith = Object.assign({}, options, { + apiEndpoint: 'https://bigquery.googleapis.com', + }); + const bigquery = new BigQuery(options); + const calledWith = bigquery.calledWith_[1]; + 
assert.notStrictEqual(calledWith, options); + assert.notDeepStrictEqual(calledWith, options); + assert.deepStrictEqual(calledWith, expectedCalledWith); + }); + + describe('BIGQUERY_EMULATOR_HOST', () => { + const EMULATOR_HOST = 'https://internal.benchmark.com/path'; + + before(() => { + process.env.BIGQUERY_EMULATOR_HOST = EMULATOR_HOST; + }); + + after(() => { + delete process.env.BIGQUERY_EMULATOR_HOST; + }); + + it('should set baseUrl to env var STORAGE_EMULATOR_HOST', () => { + bq = new BigQuery({ + projectId: PROJECT_ID, + }); + + const calledWith = bq.calledWith_[0]; + assert.strictEqual(calledWith.baseUrl, EMULATOR_HOST); + assert.strictEqual( + calledWith.apiEndpoint, + 'https://internal.benchmark.com/path' + ); + }); + + it('should be overriden by apiEndpoint', () => { + bq = new BigQuery({ + projectId: PROJECT_ID, + apiEndpoint: 'https://some.api.com', + }); + + const calledWith = bq.calledWith_[0]; + assert.strictEqual(calledWith.baseUrl, EMULATOR_HOST); + assert.strictEqual(calledWith.apiEndpoint, 'https://some.api.com'); + }); + + it('should prepend default protocol and strip trailing slash', () => { + const EMULATOR_HOST = 'internal.benchmark.com/path/'; + process.env.BIGQUERY_EMULATOR_HOST = EMULATOR_HOST; + + bq = new BigQuery({ + projectId: PROJECT_ID, + }); + + const calledWith = bq.calledWith_[0]; + assert.strictEqual(calledWith.baseUrl, EMULATOR_HOST); + assert.strictEqual( + calledWith.apiEndpoint, + 'https://internal.benchmark.com/path' + ); + }); + }); + + describe('prettyPrint request interceptor', () => { + let requestInterceptor: Function; + + beforeEach(() => { + requestInterceptor = bq.interceptors.pop().request; + }); + + it('should disable prettyPrint', () => { + assert.deepStrictEqual(requestInterceptor({}), { + qs: {prettyPrint: false}, + }); + }); + + it('should clone json', () => { + const reqOpts = {qs: {a: 'b'}}; + const expectedReqOpts = {qs: {a: 'b', prettyPrint: false}}; + assert.deepStrictEqual(requestInterceptor(reqOpts), expectedReqOpts); + assert.notDeepStrictEqual(reqOpts, expectedReqOpts); + }); + }); }); describe('mergeSchemaWithRows_', () => { @@ -212,6 +372,7 @@ describe('BigQuery', () => { {name: 'hair_count', type: 'FLOAT'}, {name: 'teeth_count', type: 'FLOAT64'}, {name: 'numeric_col', type: 'NUMERIC'}, + {name: 'bignumeric_col', type: 'BIGNUMERIC'}, ], } as {fields: TableField[]}; @@ -243,6 +404,13 @@ describe('BigQuery', () => { input, }; }); + + sandbox.stub(BigQuery, 'geography').callsFake(input => { + return { + type: 'fakeGeography', + input, + }; + }); }); it('should merge the schema and flatten the rows', () => { @@ -261,6 +429,7 @@ describe('BigQuery', () => { {v: '5.222330009847'}, {v: '30.2232138'}, {v: '3.14'}, + {v: '9.9876543210123456789'}, { v: [ { @@ -299,6 +468,7 @@ describe('BigQuery', () => { {v: 'date-input'}, {v: 'datetime-input'}, {v: 'time-input'}, + {v: 'geography-input'}, ], }, expected: { @@ -313,6 +483,7 @@ describe('BigQuery', () => { hair_count: 5.222330009847, teeth_count: 30.2232138, numeric_col: new Big(3.14), + bignumeric_col: new Big('9.9876543210123456789'), arr: [10], arr2: [2], nullable: null, @@ -336,6 +507,10 @@ describe('BigQuery', () => { input: 'time-input', type: 'fakeTime', }, + geography: { + input: 'geography-input', + type: 'fakeGeography', + }, }, }, ]; @@ -398,13 +573,64 @@ describe('BigQuery', () => { type: 'TIME', }); + schemaObject.fields.push({ + name: 'geography', + type: 'GEOGRAPHY', + }); + const rawRows = rows.map(x => x.raw); - const mergedRows = 
BigQuery.mergeSchemaWithRows_(schemaObject, rawRows); + const mergedRows = BigQuery.mergeSchemaWithRows_( + schemaObject, + rawRows, + false + ); mergedRows.forEach((mergedRow: {}, index: number) => { assert.deepStrictEqual(mergedRow, rows[index].expected); }); }); + + it('should wrap integers with option', () => { + const wrapIntegersBoolean = true; + const wrapIntegersObject = {}; + const fakeInt = new BigQueryInt(100); + + const SCHEMA_OBJECT = { + fields: [{name: 'fave_number', type: 'INTEGER'}], + } as {fields: TableField[]}; + + const rows = { + raw: { + f: [{v: 100}], + }, + expectedBool: { + fave_number: fakeInt, + }, + expectedObj: { + fave_number: fakeInt.valueOf(), + }, + }; + + sandbox.stub(BigQuery, 'int').returns(fakeInt); + + let mergedRows = BigQuery.mergeSchemaWithRows_( + SCHEMA_OBJECT, + rows.raw, + wrapIntegersBoolean + ); + mergedRows.forEach((mergedRow: {}) => { + assert.deepStrictEqual(mergedRow, rows.expectedBool); + }); + + mergedRows = BigQuery.mergeSchemaWithRows_( + SCHEMA_OBJECT, + rows.raw, + wrapIntegersObject + ); + mergedRows.forEach((mergedRow: {}) => { + assert.deepStrictEqual(mergedRow, rows.expectedObj); + }); + }); }); describe('date', () => { @@ -415,6 +641,7 @@ describe('BigQuery', () => { day: 1, }; + // tslint:disable-next-line ban it.skip('should expose static and instance constructors', () => { const staticD = BigQuery.date(); assert(staticD instanceof BigQueryDate); @@ -456,6 +683,7 @@ describe('BigQuery', () => { const EXPECTED_VALUE = '2017-1-1 14:2:38.883388'; + // tslint:disable-next-line ban it.skip('should expose static and instance constructors', () => { const staticDt = BigQuery.datetime(INPUT_OBJ); assert(staticDt instanceof BigQuery.datetime); @@ -501,6 +729,7 @@ describe('BigQuery', () => { fractional: 883388, }; + // tslint:disable-next-line ban it.skip('should expose static and instance constructors', () => { const staticT = BigQuery.time(); assert(staticT instanceof BigQuery.time); @@ -534,7 +763,7 @@ describe('BigQuery', () => { }); it('should not include fractional digits if not provided', () => { - const input = extend({}, INPUT_OBJ); + const input = Object.assign({}, INPUT_OBJ); delete input.fractional; const time = bq.time(input); @@ -547,6 +776,7 @@ describe('BigQuery', () => { const INPUT_DATE = new Date(INPUT_STRING); const EXPECTED_VALUE = INPUT_DATE.toJSON(); + // tslint:disable-next-line ban it.skip('should expose static and instance constructors', () => { const staticT = BigQuery.timestamp(INPUT_DATE); assert(staticT instanceof BigQuery.timestamp); @@ -573,22 +803,278 @@ describe('BigQuery', () => { }); }); - describe('getType_', () => { + describe('geography', () => { + const INPUT_STRING = 'POINT(1 2)'; + + it('should have the correct constructor name', () => { + const geography = BigQuery.geography(INPUT_STRING); + assert.strictEqual(geography.constructor.name, 'Geography'); + }); + + it('should accept a string', () => { + const geography = BigQuery.geography(INPUT_STRING); + assert.strictEqual(geography.value, INPUT_STRING); + }); + + it('should call through to the static method', () => { + const fakeGeography = {value: 'foo'}; + + sandbox + .stub(BigQuery, 'geography') + .withArgs(INPUT_STRING) + .returns(fakeGeography); + + const geography = bq.geography(INPUT_STRING); + assert.strictEqual(geography, fakeGeography); + }); + }); + + describe('int', () => { + const INPUT_STRING = '100'; + + it('should call through to the static method', () => { + const fakeInt = new BigQueryInt(INPUT_STRING); + + sandbox + 
.stub(BigQuery, 'int') + .withArgs(INPUT_STRING) + .returns(fakeInt); + + const int = bq.int(INPUT_STRING); + assert.strictEqual(int, fakeInt); + }); + + it('should have the correct constructor name', () => { + const int = BigQuery.int(INPUT_STRING); + assert.strictEqual(int.constructor.name, 'BigQueryInt'); + }); + }); + + describe('BigQueryInt', () => { + it('should store the stringified value', () => { + const INPUT_NUM = 100; + const int = new BigQueryInt(INPUT_NUM); + assert.strictEqual(int.value, INPUT_NUM.toString()); + }); + + describe('valueOf', () => { + let valueObject: IntegerTypeCastValue; + + beforeEach(() => { + valueObject = { + integerValue: 8, + }; + }); + + describe('integerTypeCastFunction is not provided', () => { + const expectedError = (opts: { + integerValue: string | number; + schemaFieldName?: string; + }) => { + return new Error( + 'We attempted to return all of the numeric values, but ' + + (opts.schemaFieldName ? opts.schemaFieldName + ' ' : '') + + 'value ' + + opts.integerValue + + " is out of bounds of 'Number.MAX_SAFE_INTEGER'.\n" + + "To prevent this error, please consider passing 'options.wrapNumbers' as\n" + + '{\n' + + ' integerTypeCastFunction: provide \n' + + ' fields: optionally specify field name(s) to be custom casted\n' + + '}\n' + ); + }; + + it('should throw if integerTypeCastOptions is provided but integerTypeCastFunction is not', () => { + assert.throws( + () => + new BigQueryInt( + valueObject, + {} as IntegerTypeCastOptions + ).valueOf(), + /integerTypeCastFunction is not a function or was not provided\./ + ); + }); + + it('should throw if integer value is outside of bounds passing objects', () => { + const largeIntegerValue = (Number.MAX_SAFE_INTEGER + 1).toString(); + const smallIntegerValue = (Number.MIN_SAFE_INTEGER - 1).toString(); + + const valueObject = { + integerValue: largeIntegerValue, + schemaFieldName: 'field', + }; + + const valueObject2 = { + integerValue: smallIntegerValue, + schemaFieldName: 'field', + }; + + assert.throws(() => { + new BigQueryInt(valueObject).valueOf(); + }, expectedError(valueObject)); + + assert.throws(() => { + new BigQueryInt(valueObject2).valueOf(); + }, expectedError(valueObject2)); + }); + + it('should throw if integer value is outside of bounds passing strings or Numbers', () => { + const largeIntegerValue = Number.MAX_SAFE_INTEGER + 1; + const smallIntegerValue = Number.MIN_SAFE_INTEGER - 1; + + // should throw when Number is passed + assert.throws(() => { + new BigQueryInt(largeIntegerValue).valueOf(); + }, expectedError({integerValue: largeIntegerValue})); + + // should throw when string is passed + assert.throws(() => { + new BigQueryInt(smallIntegerValue.toString()).valueOf(); + }, expectedError({integerValue: smallIntegerValue})); + }); + + it('should not auto throw on initialization', () => { + const largeIntegerValue = Number.MAX_SAFE_INTEGER + 1; + + const valueObject = { + integerValue: largeIntegerValue, + }; + + assert.doesNotThrow(() => { + new BigQueryInt(valueObject); + }, new RegExp(`Integer value ${largeIntegerValue} is out of bounds.`)); + }); + + describe('integerTypeCastFunction is provided', () => { + it('should throw if integerTypeCastFunction is not a function', () => { + assert.throws( + () => + new BigQueryInt(valueObject, { + integerTypeCastFunction: {} as Function, + }).valueOf(), + /integerTypeCastFunction is not a function or was not provided\./ + ); + }); + + it('should custom-cast value when integerTypeCastFunction is provided', () => { + const stub = sinon.stub(); + 
+ new BigQueryInt(valueObject, { + integerTypeCastFunction: stub, + }).valueOf(); + assert.ok(stub.calledOnce); + }); + + it('should custom-cast value if in `fields` specified by user', () => { + const stub = sinon.stub(); + + Object.assign(valueObject, { + schemaFieldName: 'funField', + }); + + new BigQueryInt(valueObject, { + integerTypeCastFunction: stub, + fields: 'funField', + }).valueOf(); + assert.ok(stub.calledOnce); + }); + + it('should not custom-cast value if not in `fields` specified by user', () => { + const stub = sinon.stub(); + + Object.assign(valueObject, { + schemaFieldName: 'funField', + }); + + new BigQueryInt(valueObject, { + integerTypeCastFunction: stub, + fields: 'unFunField', + }).valueOf(); + assert.ok(stub.notCalled); + }); + + it('should catch integerTypeCastFunction error and throw', () => { + const error = new Error('My bad!'); + const stub = sinon.stub().throws(error); + assert.throws( + () => + new BigQueryInt(valueObject, { + integerTypeCastFunction: stub, + }).valueOf(), + /integerTypeCastFunction threw an error:/ + ); + }); + }); + }); + + describe('toJSON', () => { + it('should return correct JSON', () => { + const expected = {type: 'BigQueryInt', value: '8'}; + const JSON = new BigQueryInt(valueObject).toJSON(); + assert.deepStrictEqual(JSON, expected); + }); + }); + }); + }); + + describe('getTypeDescriptorFromValue_', () => { it('should return correct types', () => { - assert.strictEqual(BigQuery.getType_(bq.date()).type, 'DATE'); - assert.strictEqual(BigQuery.getType_(bq.datetime('')).type, 'DATETIME'); - assert.strictEqual(BigQuery.getType_(bq.time()).type, 'TIME'); - assert.strictEqual(BigQuery.getType_(bq.timestamp(0)).type, 'TIMESTAMP'); - assert.strictEqual(BigQuery.getType_(Buffer.alloc(2)).type, 'BYTES'); - assert.strictEqual(BigQuery.getType_(true).type, 'BOOL'); - assert.strictEqual(BigQuery.getType_(8).type, 'INT64'); - assert.strictEqual(BigQuery.getType_(8.1).type, 'FLOAT64'); - assert.strictEqual(BigQuery.getType_('hi').type, 'STRING'); - assert.strictEqual(BigQuery.getType_(new Big('1.1')).type, 'NUMERIC'); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(bq.date()).type, + 'DATE' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(bq.datetime('')).type, + 'DATETIME' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(bq.time()).type, + 'TIME' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(bq.timestamp(0)).type, + 'TIMESTAMP' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(Buffer.alloc(2)).type, + 'BYTES' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(true).type, + 'BOOL' + ); + assert.strictEqual(BigQuery.getTypeDescriptorFromValue_(8).type, 'INT64'); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(8.1).type, + 'FLOAT64' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_('hi').type, + 'STRING' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(new Big('1.1')).type, + 'NUMERIC' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_( + new Big('1999.9876543210123456789') + ).type, + 'BIGNUMERIC' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(bq.int('100')).type, + 'INT64' + ); + assert.strictEqual( + BigQuery.getTypeDescriptorFromValue_(bq.geography('POINT (1 1')).type, + 'GEOGRAPHY' + ); }); it('should return correct type for an array', () => { - const type = BigQuery.getType_([1]); + const type = BigQuery.getTypeDescriptorFromValue_([1]); assert.deepStrictEqual(type, { 
type: 'ARRAY', @@ -599,7 +1085,7 @@ describe('BigQuery', () => { }); it('should return correct type for a struct', () => { - const type = BigQuery.getType_({prop: 1}); + const type = BigQuery.getTypeDescriptorFromValue_({prop: 1}); assert.deepStrictEqual(type, { type: 'STRUCT', @@ -615,30 +1101,110 @@ describe('BigQuery', () => { }); it('should throw if a type cannot be detected', () => { - const expectedError = new RegExp([ - 'This value could not be translated to a BigQuery data type.', - undefined, - ].join('\n')); + const expectedError = new RegExp( + [ + 'This value could not be translated to a BigQuery data type.', + undefined, + ].join('\n') + ); + + assert.throws(() => { + BigQuery.getTypeDescriptorFromValue_(undefined); + }, expectedError); + }); + + it('should throw with an empty array', () => { + assert.throws(() => { + BigQuery.getTypeDescriptorFromValue_([]); + }, /Parameter types must be provided for empty arrays via the 'types' field in query options./); + }); + + it('should throw with a null value', () => { + const expectedError = new RegExp( + "Parameter types must be provided for null values via the 'types' field in query options." + ); assert.throws(() => { - BigQuery.getType_(undefined); + BigQuery.getTypeDescriptorFromValue_(null); }, expectedError); }); }); + describe('getTypeDescriptorFromProvidedType_', () => { + it('should return correct type for an array', () => { + const type = BigQuery.getTypeDescriptorFromProvidedType_(['INT64']); + + assert.deepStrictEqual(type, { + type: 'ARRAY', + arrayType: { + type: 'INT64', + }, + }); + }); + + it('should return correct type for a struct', () => { + const type = BigQuery.getTypeDescriptorFromProvidedType_({prop: 'INT64'}); + + assert.deepStrictEqual(type, { + type: 'STRUCT', + structTypes: [ + { + name: 'prop', + type: { + type: 'INT64', + }, + }, + ], + }); + }); + + it('should throw for invalid provided type', () => { + const INVALID_TYPE = 'invalid'; + + assert.throws(() => { + BigQuery.getTypeDescriptorFromProvidedType_(INVALID_TYPE); + }, /Invalid type provided:/); + }); + }); + describe('valueToQueryParameter_', () => { it('should get the type', done => { const value = {}; - sandbox.stub(BigQuery, 'getType_').callsFake(value_ => { - assert.strictEqual(value_, value); - setImmediate(done); - return { - type: '', - }; - }); + sandbox + .stub(BigQuery, 'getTypeDescriptorFromValue_') + .callsFake(value_ => { + assert.strictEqual(value_, value); + setImmediate(done); + return { + type: '', + }; + }); - BigQuery.valueToQueryParameter_(value); + const queryParameter = BigQuery.valueToQueryParameter_(value); + assert.strictEqual(queryParameter.parameterValue.value, value); + }); + + it('should get the provided type', done => { + const value = {}; + const providedType = 'STRUCT'; + + sandbox + .stub(BigQuery, 'getTypeDescriptorFromProvidedType_') + .callsFake(providedType_ => { + assert.strictEqual(providedType_, providedType); + setImmediate(done); + return { + type: '', + }; + }); + + const queryParameter = BigQuery.valueToQueryParameter_( + value, + providedType + ); + + assert.strictEqual(queryParameter.parameterValue.value, value); }); it('should format a Date', () => { @@ -652,7 +1218,7 @@ describe('BigQuery', () => { }; }); - sandbox.stub(BigQuery, 'getType_').returns({ + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ type: 'TIMESTAMP', }); @@ -665,7 +1231,7 @@ describe('BigQuery', () => { value: 'value', }; - sandbox.stub(BigQuery, 'getType_').returns({ + sandbox.stub(BigQuery, 
'getTypeDescriptorFromValue_').returns({ type: 'DATETIME', }); @@ -673,12 +1239,28 @@ describe('BigQuery', () => { assert.strictEqual(queryParameter.parameterValue.value, datetime.value); }); + it('should locate the value on nested DATETIME objects', () => { + const datetimes = [ + { + value: 'value', + }, + ]; + + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ + type: 'ARRAY', + arrayType: {type: 'DATETIME'}, + }); + + const {parameterValue} = BigQuery.valueToQueryParameter_(datetimes); + assert.deepStrictEqual(parameterValue.arrayValues, datetimes); + }); + it('should locate the value on TIME objects', () => { const time = { value: 'value', }; - sandbox.stub(BigQuery, 'getType_').returns({ + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ type: 'TIME', }); @@ -686,9 +1268,54 @@ describe('BigQuery', () => { assert.strictEqual(queryParameter.parameterValue.value, time.value); }); + it('should locate the value on nested TIME objects', () => { + const times = [ + { + value: 'value', + }, + ]; + + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ + type: 'ARRAY', + arrayType: {type: 'TIME'}, + }); + + const {parameterValue} = BigQuery.valueToQueryParameter_(times); + assert.deepStrictEqual(parameterValue.arrayValues, times); + }); + + it('should locate the value on BigQueryInt objects', () => { + const int = new BigQueryInt(100); + + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ + type: 'INT64', + }); + + const queryParameter = BigQuery.valueToQueryParameter_(int); + assert.strictEqual(queryParameter.parameterValue.value, int.value); + }); + + it('should locate the value on nested BigQueryInt objects', () => { + const ints = [new BigQueryInt('100')]; + const expected = [{value: '100'}]; + + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ + type: 'ARRAY', + arrayType: {type: 'INT64'}, + }); + + const {parameterValue} = BigQuery.valueToQueryParameter_(ints); + assert.deepStrictEqual(parameterValue.arrayValues, expected); + }); + it('should format an array', () => { const array = [1]; - sandbox.stub(BigQuery, 'getType_').returns({type: 'ARRAY'}); + + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ + type: 'ARRAY', + arrayType: {type: 'INT64'}, + }); + const queryParameter = BigQuery.valueToQueryParameter_(array); const arrayValues = queryParameter.parameterValue.arrayValues; assert.deepStrictEqual(arrayValues, [ @@ -698,6 +1325,34 @@ describe('BigQuery', () => { ]); }); + it('should format an array with provided type', () => { + const array = [[1]]; + const providedType = [['INT64']]; + + sandbox.stub(BigQuery, 'getTypeDescriptorFromProvidedType_').returns({ + type: 'ARRAY', + arrayType: { + type: 'ARRAY', + arrayType: {type: 'INT64'}, + }, + }); + + const queryParameter = BigQuery.valueToQueryParameter_( + array, + providedType + ); + const arrayValues = queryParameter.parameterValue.arrayValues; + assert.deepStrictEqual(arrayValues, [ + { + arrayValues: [ + { + value: array[0][0], + }, + ], + }, + ]); + }); + it('should format a struct', () => { const struct = { key: 'value', @@ -705,7 +1360,7 @@ describe('BigQuery', () => { const expectedParameterValue = {}; - sandbox.stub(BigQuery, 'getType_').callsFake(() => { + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').callsFake(() => { sandbox.stub(BigQuery, 'valueToQueryParameter_').callsFake(value => { assert.strictEqual(value, struct.key); return { @@ -724,9 +1379,67 @@ describe('BigQuery', () => { assert.strictEqual(structValues.key, 
expectedParameterValue); }); + it('should format a struct with provided type', () => { + const struct = {a: 1}; + const providedType = {a: 'INT64'}; + + const getTypeStub = sandbox.stub( + BigQuery, + 'getTypeDescriptorFromProvidedType_' + ); + getTypeStub.onFirstCall().returns({ + type: 'STRUCT', + structTypes: [ + { + name: 'a', + type: { + type: 'INT64', + }, + }, + ], + }); + getTypeStub.onSecondCall().returns({type: 'INT64'}); + + const queryParameter = BigQuery.valueToQueryParameter_( + struct, + providedType + ); + const structValues = queryParameter.parameterValue.structValues; + assert.deepStrictEqual(structValues, { + a: { + value: 1, + }, + }); + }); + + it('should format an array of structs', () => { + const structs = [{name: 'Stephen'}]; + const expectedParam = { + parameterType: { + type: 'ARRAY', + arrayType: { + type: 'STRUCT', + structTypes: [{name: 'name', type: {type: 'STRING'}}], + }, + }, + parameterValue: { + arrayValues: [ + { + structValues: { + name: {value: 'Stephen'}, + }, + }, + ], + }, + }; + + const param = BigQuery.valueToQueryParameter_(structs); + assert.deepStrictEqual(param, expectedParam); + }); + it('should format all other types', () => { const typeName = 'ANY-TYPE'; - sandbox.stub(BigQuery, 'getType_').returns({ + sandbox.stub(BigQuery, 'getTypeDescriptorFromValue_').returns({ type: typeName, }); assert.deepStrictEqual(BigQuery.valueToQueryParameter_(8), { @@ -738,6 +1451,45 @@ }, }); }); + + describe('_getValue', () => { + it('should return correct value', () => { + const value = 'VALUE'; + const type = 'TYPE'; + + sandbox.stub(BigQuery, '_isCustomType').returns(false); + assert.strictEqual(BigQuery._getValue(value, type), value); + }); + + it('should return value of custom type', () => { + const geography = bq.geography('POINT (1 1)'); + + sandbox.stub(BigQuery, '_isCustomType').returns(true); + assert.strictEqual( + BigQuery._getValue(geography, geography.type), + geography.value + ); + }); + + it('should handle null values', () => { + const value = null; + const type = 'TYPE'; + + assert.strictEqual(BigQuery._getValue(value, type), value); + }); + }); + + describe('_isCustomType', () => { + it('should identify custom types', () => { + const time = {type: 'TIME'}; + const date = {type: 'DATE'}; + const geo = {type: 'GEOGRAPHY'}; + + assert.strictEqual(BigQuery._isCustomType(time), true); + assert.strictEqual(BigQuery._isCustomType(date), true); + assert.strictEqual(BigQuery._isCustomType(geo), true); + }); + }); }); describe('createDataset', () => { @@ -777,7 +1529,7 @@ describe('BigQuery', () => { c: 'd', }; - const originalOptions = extend({}, options); + const originalOptions = Object.assign({}, options); bq.request = (reqOpts: DecorateRequestOptions) => { assert.notStrictEqual(reqOpts.json, options); @@ -821,11 +1573,13 @@ describe('BigQuery', () => { }; bq.createDataset( - DATASET_ID, (err: Error, dataset: Dataset, apiResponse: {}) => { - assert.ifError(err); - assert.deepStrictEqual(apiResponse, resp); - done(); - }); + DATASET_ID, + (err: Error, dataset: Dataset, apiResponse: {}) => { + assert.ifError(err); + assert.deepStrictEqual(apiResponse, resp); + done(); + } + ); }); it('should assign metadata to the Dataset object', done => { @@ -858,7 +1612,7 @@ describe('BigQuery', () => { beforeEach(() => { fakeJobId = uuid.v4(); - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any (fakeUuid as any).v4 = () => { return fakeJobId; }; @@ -869,7 +1623,7 @@
describe('BigQuery', () => { a: 'b', }; - const expectedOptions = extend({}, fakeOptions, { + const expectedOptions = Object.assign({}, fakeOptions, { jobReference: { projectId: bq.projectId, jobId: fakeJobId, @@ -945,7 +1699,7 @@ describe('BigQuery', () => { bq.createJob({}, assert.ifError); }); - it('should return any request errors', done => { + it('should return a non-409 request error', done => { const response = {}; const error = new Error('err.'); @@ -961,6 +1715,40 @@ describe('BigQuery', () => { }); }); + it('should refresh metadata when API returns 409', done => { + bq.job = () => { + return { + getMetadata: async () => [RESPONSE], + }; + }; + + bq.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + const error = new util.ApiError('Error.'); + error.code = 409; + callback(error); + }; + + bq.createJob({}, (err: Error, job: Job, resp: {}) => { + assert.ifError(err); + assert.strictEqual(resp, RESPONSE); + done(); + }); + }); + + it('should return 409 if the user provided the job ID', done => { + const error = new util.ApiError('Error.'); + error.code = 409; + + bq.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(error); + }; + + bq.createJob({jobId: 'job-id'}, (err: Error) => { + assert.strictEqual(err, error); + done(); + }); + }); + it('should return any status errors', done => { const errors = [{reason: 'notFound'}]; const response = extend(true, {}, RESPONSE, { @@ -973,8 +1761,8 @@ describe('BigQuery', () => { bq.createJob({}, (err: FakeApiError) => { assert(err instanceof FakeApiError); - - const errorOpts = err.calledWith_[0]; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const errorOpts: any = err.calledWith_[0]; assert.deepStrictEqual(errorOpts.errors, errors); assert.strictEqual(errorOpts.response, response); done(); @@ -994,7 +1782,7 @@ describe('BigQuery', () => { callback(null, RESPONSE); }; - bq.createJob({}, (err: Error, job: Job, resp: {}) => { + bq.createJob({location: LOCATION}, (err: Error, job: Job, resp: {}) => { assert.ifError(err); assert.strictEqual(job, fakeJob); assert.strictEqual(job.metadata, RESPONSE); @@ -1002,6 +1790,24 @@ describe('BigQuery', () => { done(); }); }); + + it('should update the job location in the official API format', done => { + const fakeJob: {location?: string} = {}; + + bq.job = () => { + return fakeJob; + }; + + bq.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(null, RESPONSE); + }; + + bq.createJob({}, (err: Error) => { + assert.ifError(err); + assert.strictEqual(fakeJob.location, LOCATION); + done(); + }); + }); }); describe('createQueryJob', () => { @@ -1022,7 +1828,7 @@ describe('BigQuery', () => { }); describe('with destination', () => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let dataset: any; const TABLE_ID = 'table-id'; @@ -1046,11 +1852,13 @@ describe('BigQuery', () => { it('should assign destination table to request body', done => { bq.request = (reqOpts: DecorateRequestOptions) => { assert.deepStrictEqual( - reqOpts.json.configuration.query.destinationTable, { - datasetId: dataset.id, - projectId: dataset.bigQuery.projectId, - tableId: TABLE_ID, - }); + reqOpts.json.configuration.query.destinationTable, + { + datasetId: dataset.id, + projectId: dataset.bigQuery.projectId, + tableId: TABLE_ID, + } + ); done(); }; @@ -1082,37 +1890,44 @@ describe('BigQuery', () => { const POSITIONAL_PARAMS = ['value']; + const NAMED_TYPES = {key: 'STRING'}; + + const 
POSITIONAL_TYPES = ['STRING']; + it('should delete the params option', done => { bq.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.params, undefined); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + assert.strictEqual((reqOpts as any).params, undefined); done(); }; bq.createQueryJob( - { - query: QUERY_STRING, - params: NAMED_PARAMS, - }, - assert.ifError); + { + query: QUERY_STRING, + params: NAMED_PARAMS, + }, + assert.ifError + ); }); describe('named', () => { it('should set the correct parameter mode', done => { bq.createJob = (reqOpts: JobOptions) => { - const query = reqOpts.configuration.query; + const query = reqOpts.configuration!.query!; assert.strictEqual(query.parameterMode, 'named'); done(); }; bq.createQueryJob( - { - query: QUERY_STRING, - params: NAMED_PARAMS, - }, - assert.ifError); + { + query: QUERY_STRING, + params: NAMED_PARAMS, + }, + assert.ifError + ); }); - it('should get set the correct query parameters', done => { + it('should set the correct query parameters', done => { const queryParameter = {}; BigQuery.valueToQueryParameter_ = (value: {}) => { @@ -1121,38 +1936,105 @@ describe('BigQuery', () => { }; bq.createJob = (reqOpts: JobOptions) => { - const query = reqOpts.configuration.query; - assert.strictEqual(query.queryParameters[0], queryParameter); - assert.strictEqual(query.queryParameters[0].name, 'key'); + const query = reqOpts.configuration!.query!; + assert.strictEqual(query.queryParameters![0], queryParameter); + assert.strictEqual(query.queryParameters![0].name, 'key'); done(); }; bq.createQueryJob( - { - query: QUERY_STRING, - params: NAMED_PARAMS, - }, - assert.ifError); + { + query: QUERY_STRING, + params: NAMED_PARAMS, + }, + assert.ifError + ); + }); + + it('should allow for optional parameter types', () => { + const queryParameter = {}; + + BigQuery.valueToQueryParameter_ = ( + value: {}, + providedType: string + ) => { + assert.strictEqual(value, NAMED_PARAMS.key); + assert.strictEqual(providedType, NAMED_TYPES.key); + return queryParameter; + }; + bq.createJob = (reqOpts: JobOptions) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + assert.strictEqual((reqOpts as any).params, undefined); + }; + + bq.createQueryJob( + { + query: QUERY_STRING, + params: NAMED_PARAMS, + types: NAMED_TYPES, + }, + assert.ifError + ); + }); + + it('should allow for providing only some parameter types', () => { + const queryParameter = {}; + + BigQuery.valueToQueryParameter_ = (value: {}) => { + assert.strictEqual(value, NAMED_PARAMS.key); + return queryParameter; + }; + + bq.createJob = (reqOpts: JobOptions) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + assert.strictEqual((reqOpts as any).params, undefined); + }; + + bq.createQueryJob( + { + query: QUERY_STRING, + params: NAMED_PARAMS, + types: {}, + }, + assert.ifError + ); + }); + + it('should throw for invalid type structure provided', () => { + assert.throws(() => { + bq.createQueryJob({ + query: QUERY_STRING, + params: NAMED_PARAMS, + types: POSITIONAL_TYPES, + }); + }, /Provided types must match the value type passed to `params`/); }); }); describe('positional', () => { it('should set the correct parameter mode', done => { + const queryParameter = {}; + + BigQuery.valueToQueryParameter_ = () => { + return queryParameter; + }; + bq.createJob = (reqOpts: JobOptions) => { - const query = reqOpts.configuration.query; + const query = reqOpts.configuration!.query!; assert.strictEqual(query.parameterMode, 'positional'); done(); }; 
bq.createQueryJob( - { - query: QUERY_STRING, - params: POSITIONAL_PARAMS, - }, - assert.ifError); + { + query: QUERY_STRING, + params: POSITIONAL_PARAMS, + }, + assert.ifError + ); }); - it('should get set the correct query parameters', done => { + it('should set the correct query parameters', done => { const queryParameter = {}; BigQuery.valueToQueryParameter_ = (value: {}) => { @@ -1161,17 +2043,80 @@ describe('BigQuery', () => { }; bq.createJob = (reqOpts: JobOptions) => { - const query = reqOpts.configuration.query; - assert.strictEqual(query.queryParameters[0], queryParameter); + const query = reqOpts.configuration!.query!; + assert.strictEqual(query.queryParameters![0], queryParameter); done(); }; bq.createQueryJob( - { - query: QUERY_STRING, - params: POSITIONAL_PARAMS, - }, - assert.ifError); + { + query: QUERY_STRING, + params: POSITIONAL_PARAMS, + }, + assert.ifError + ); + }); + + it('should convert value and type to query parameter', done => { + const fakeQueryParameter = {fake: 'query parameter'}; + + bq.createJob = (reqOpts: JobOptions) => { + const queryParameters = reqOpts.configuration!.query! + .queryParameters; + assert.deepStrictEqual(queryParameters, [fakeQueryParameter]); + done(); + }; + + sandbox + .stub(BigQuery, 'valueToQueryParameter_') + .callsFake((value, type) => { + assert.strictEqual(value, POSITIONAL_PARAMS[0]); + assert.strictEqual(type, POSITIONAL_TYPES[0]); + return fakeQueryParameter; + }); + + bq.createQueryJob({ + query: QUERY_STRING, + params: POSITIONAL_PARAMS, + types: POSITIONAL_TYPES, + }); + }); + + it('should allow for optional parameter types', () => { + bq.createJob = (reqOpts: JobOptions) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + assert.strictEqual((reqOpts as any).params, undefined); + }; + + bq.createQueryJob( + { + query: QUERY_STRING, + params: POSITIONAL_PARAMS, + types: POSITIONAL_TYPES, + }, + assert.ifError + ); + }); + + it('should throw for invalid type structure provided for positional params', () => { + assert.throws(() => { + bq.createQueryJob({ + query: QUERY_STRING, + params: POSITIONAL_PARAMS, + types: NAMED_TYPES, + }); + }, /Provided types must match the value type passed to `params`/); + }); + + it('should throw for incorrect number of types provided for positional params', () => { + const ADDITIONAL_TYPES = ['string', 'string']; + assert.throws(() => { + bq.createQueryJob({ + query: QUERY_STRING, + params: POSITIONAL_PARAMS, + types: ADDITIONAL_TYPES, + }); + }, /Incorrect number of parameter types provided./); }); }); }); @@ -1183,8 +2128,31 @@ describe('BigQuery', () => { }; bq.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.configuration.query.dryRun, undefined); - assert.strictEqual(reqOpts.configuration.dryRun, options.dryRun); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.query as any).dryRun, + undefined + ); + assert.strictEqual(reqOpts.configuration!.dryRun, options.dryRun); + done(); + }; + + bq.createQueryJob(options, assert.ifError); + }); + + it('should accept the label options', done => { + const options = { + query: QUERY_STRING, + labels: {foo: 'bar'}, + }; + + bq.createJob = (reqOpts: JobOptions) => { + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.query as any).labels, + undefined + ); + assert.deepStrictEqual(reqOpts.configuration!.labels, options.labels); done(); }; @@ -1198,7 +2166,11 @@ describe('BigQuery', () => { }; 
bq.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.configuration.query.jobPrefix, undefined); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.query as any).jobPrefix, + undefined + ); assert.strictEqual(reqOpts.jobPrefix, options.jobPrefix); done(); }; @@ -1213,7 +2185,11 @@ describe('BigQuery', () => { }; bq.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.configuration.query.location, undefined); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.query as any).location, + undefined + ); assert.strictEqual(reqOpts.location, LOCATION); done(); }; @@ -1228,7 +2204,11 @@ describe('BigQuery', () => { }; bq.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.configuration.query.jobId, undefined); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.query as any).jobId, + undefined + ); assert.strictEqual(reqOpts.jobId, options.jobId); done(); }; @@ -1236,9 +2216,31 @@ describe('BigQuery', () => { bq.createQueryJob(options, assert.ifError); }); + it('should accept the jobTimeoutMs options', done => { + const options = { + query: QUERY_STRING, + jobTimeoutMs: 1000, + }; + + bq.createJob = (reqOpts: JobOptions) => { + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.query as any).jobTimeoutMs, + undefined + ); + assert.strictEqual( + reqOpts.configuration!.jobTimeoutMs, + options.jobTimeoutMs + ); + done(); + }; + + bq.createQueryJob(options, assert.ifError); + }); + it('should pass the callback to createJob', done => { bq.createJob = (reqOpts: DecorateRequestOptions, callback: Function) => { - callback(); // the done fn + callback(); // the done fn }; bq.createQueryJob(QUERY_STRING, done); @@ -1281,7 +2283,7 @@ describe('BigQuery', () => { }); const options = {a: 'b'}; - const expectedOptions = extend({location: LOCATION}, options); + const expectedOptions = Object.assign({location: LOCATION}, options); const ds = bq.dataset(DATASET_ID, options); const args = ds.calledWith_; @@ -1371,11 +2373,12 @@ describe('BigQuery', () => { }; bq.getDatasets( - (err: Error, datasets: {}, nextQuery: {}, apiResponse: {}) => { - assert.ifError(err); - assert.strictEqual(apiResponse, resp); - done(); - }); + (err: Error, datasets: {}, nextQuery: {}, apiResponse: {}) => { + assert.ifError(err); + assert.strictEqual(apiResponse, resp); + done(); + } + ); }); it('should assign metadata to the Dataset objects', done => { @@ -1586,7 +2589,7 @@ describe('BigQuery', () => { }); const options = {a: 'b'}; - const expectedOptions = extend({location: LOCATION}, options); + const expectedOptions = Object.assign({location: LOCATION}, options); const job = bq.job(JOB_ID, options); const args = job.calledWith_; @@ -1656,7 +2659,7 @@ describe('BigQuery', () => { it('should assign Job on the options', done => { const fakeJob = { - getQueryResults: (options: {}, callback: Function) => { + getQueryResults: (options: {}) => { assert.deepStrictEqual(options, {job: fakeJob}); done(); }, @@ -1688,14 +2691,88 @@ describe('BigQuery', () => { }); describe('queryAsStream_', () => { + let queryStub: SinonStub; + + beforeEach(() => { + queryStub = sandbox.stub(bq, 'query').callsArgAsync(2); + }); + it('should call query correctly', done => { const query = 'SELECT'; - bq.query = (query_: {}, options: {}, callback: Function) => { - 
assert.strictEqual(query_, query); - assert.deepStrictEqual(options, {autoPaginate: false}); - callback(); // done() + bq.queryAsStream_(query, done); + assert( + queryStub.calledOnceWithExactly( + query, + {autoPaginate: false}, + sinon.match.func + ) + ); + }); + + it('should query as job if supplied', done => { + const cbStub = sinon.stub().callsArgAsync(1); + const query = { + job: { + getQueryResults: cbStub, + }, }; bq.queryAsStream_(query, done); + assert(cbStub.calledOnceWithExactly(query, sinon.match.func)); + assert(queryStub.notCalled); + }); + + it('should pass wrapIntegers if supplied', done => { + const statement = 'SELECT'; + const query = { + query: statement, + }; + const options = { + wrapIntegers: { + integerValue: 100, + }, + }; + bq.queryAsStream_(query, options, done); + assert( + queryStub.calledOnceWithExactly( + query, + {autoPaginate: false, wrapIntegers: options.wrapIntegers}, + sinon.match.func + ) + ); + }); + }); + + describe('#sanitizeEndpoint', () => { + const USER_DEFINED_SHORT_API_ENDPOINT = 'myapi.com:8080'; + const USER_DEFINED_PROTOCOL = 'myproto'; + const USER_DEFINED_FULL_API_ENDPOINT = `${USER_DEFINED_PROTOCOL}://myapi.com:8080`; + + it('should default protocol to https', () => { + const endpoint = BigQuery.sanitizeEndpoint( + USER_DEFINED_SHORT_API_ENDPOINT + ); + assert.strictEqual(endpoint.match(PROTOCOL_REGEX)![1], 'https'); + }); + + it('should not override protocol', () => { + const endpoint = BigQuery.sanitizeEndpoint( + USER_DEFINED_FULL_API_ENDPOINT + ); + assert.strictEqual( + endpoint.match(PROTOCOL_REGEX)![1], + USER_DEFINED_PROTOCOL + ); + }); + + it('should remove trailing slashes from URL', () => { + const endpointsWithTrailingSlashes = [ + `${USER_DEFINED_FULL_API_ENDPOINT}/`, + `${USER_DEFINED_FULL_API_ENDPOINT}//`, + ]; + for (const endpointWithTrailingSlashes of endpointsWithTrailingSlashes) { + const endpoint = BigQuery.sanitizeEndpoint(endpointWithTrailingSlashes); + assert.strictEqual(endpoint.endsWith('/'), false); + } }); }); }); diff --git a/test/dataset.ts b/test/dataset.ts index eeeb6287..1680e501 100644 --- a/test/dataset.ts +++ b/test/dataset.ts @@ -1,38 +1,51 @@ -/** - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {DecorateRequestOptions, ServiceObject, ServiceObjectConfig, util} from '@google-cloud/common'; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import { + DecorateRequestOptions, + ServiceObject, + ServiceObjectConfig, + util, +} from '@google-cloud/common'; import * as pfy from '@google-cloud/promisify'; -import * as arrify from 'arrify'; +import arrify = require('arrify'); import * as assert from 'assert'; +import {describe, it, before, beforeEach} from 'mocha'; import * as extend from 'extend'; import * as proxyquire from 'proxyquire'; import * as _root from '../src'; -import {DataSetOptions} from '../src/dataset'; +import {DatasetOptions} from '../src/dataset'; import {FormattedMetadata, TableOptions} from '../src/table'; +interface CalledWithDataset extends ServiceObject { + calledWith_: Array<{ + parent: {}; + baseUrl: string; + id: string; + methods: string[]; + }>; +} + let promisified = false; -const fakePfy = extend({}, pfy, { +const fakePfy = Object.assign({}, pfy, { promisifyAll: (c: Function, options: pfy.PromisifyAllOptions) => { if (c.name !== 'Dataset') { return; } promisified = true; - assert.deepStrictEqual(options.exclude, ['table']); + assert.deepStrictEqual(options.exclude, ['model', 'routine', 'table']); }, }); @@ -45,28 +58,33 @@ const fakePaginator = { } methods = arrify(methods); assert.strictEqual(c.name, 'Dataset'); - assert.deepStrictEqual(methods, ['getTables']); + assert.deepStrictEqual(methods, [ + 'getModels', + 'getRoutines', + 'getTables', + ]); extended = true; }, streamify: (methodName: string) => { return methodName; }, - } + }, }; class FakeServiceObject extends ServiceObject { calledWith_: IArguments; constructor(config: ServiceObjectConfig) { super(config); + // eslint-disable-next-line prefer-rest-params this.calledWith_ = arguments; } } describe('BigQuery/Dataset', () => { - const BIGQUERY = { + const BIGQUERY = ({ projectId: 'my-project', createDataset: util.noop, - } as {} as _root.BigQuery; + } as {}) as _root.BigQuery; const DATASET_ID = 'kittens'; const LOCATION = 'asia-northeast1'; @@ -74,17 +92,17 @@ describe('BigQuery/Dataset', () => { let Dataset: typeof _root.Dataset; // tslint:disable-next-line variable-name let Table: typeof _root.Table; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let ds: any; before(() => { Dataset = proxyquire('../src/dataset', { - '@google-cloud/common': { - ServiceObject: FakeServiceObject, - }, - '@google-cloud/paginator': fakePaginator, - '@google-cloud/promisify': fakePfy, - }).Dataset; + '@google-cloud/common': { + ServiceObject: FakeServiceObject, + }, + '@google-cloud/paginator': fakePaginator, + '@google-cloud/promisify': fakePfy, + }).Dataset; Table = require('../src/table').Table; }); @@ -94,11 +112,12 @@ describe('BigQuery/Dataset', () => { describe('instantiation', () => { it('should extend the correct methods', () => { - assert(extended); // See `fakePaginator.extend` + assert(extended); // See `fakePaginator.extend` }); it('should streamify the correct methods', () => { assert.strictEqual(ds.getTablesStream, 'getTables'); + assert.strictEqual(ds.getModelsStream, 'getModels'); }); it('should promisify all the things', () => { @@ -108,7 +127,7 @@ describe('BigQuery/Dataset', () => { it('should inherit from ServiceObject', () => { assert(ds instanceof ServiceObject); - const calledWith = ds.calledWith_[0]; + const calledWith = (ds as CalledWithDataset).calledWith_[0]; assert.strictEqual(calledWith.parent, BIGQUERY); assert.strictEqual(calledWith.baseUrl, '/datasets'); @@ -130,11 +149,11 @@ describe('BigQuery/Dataset', () => { }); describe('createMethod', () => { - // 
tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let bq: any; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let ds: any; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let config: any; beforeEach(() => { @@ -149,7 +168,7 @@ describe('BigQuery/Dataset', () => { bq.createDataset = (id: string, options: {}, callback: Function) => { assert.strictEqual(id, DATASET_ID); assert.deepStrictEqual(options, OPTIONS); - callback(); // the done fn + callback(); // the done fn }; config.createMethod(DATASET_ID, OPTIONS, done); @@ -157,18 +176,21 @@ describe('BigQuery/Dataset', () => { it('should optionally accept options', done => { bq.createDataset = (id: string, options: {}, callback: Function) => { - callback(); // the done fn + callback(); // the done fn }; config.createMethod(DATASET_ID, done); }); it('should pass the location', done => { - bq.createDataset = - (id: string, options: DataSetOptions, callback: Function) => { - assert.strictEqual(options.location, LOCATION); - callback(); // the done fn - }; + bq.createDataset = ( + id: string, + options: DatasetOptions, + callback: Function + ) => { + assert.strictEqual(options.location, LOCATION); + callback(); // the done fn + }; ds.location = LOCATION; config.createMethod(DATASET_ID, done); @@ -205,7 +227,7 @@ describe('BigQuery/Dataset', () => { }, }; - const expectedHeaders = extend({}, fakeReqOpts.headers, { + const expectedHeaders = Object.assign({}, fakeReqOpts.headers, { 'If-Match': FAKE_ETAG, }); @@ -239,19 +261,22 @@ describe('BigQuery/Dataset', () => { }; const expectedOptions = extend( - true, { - location: LOCATION, + true, + { + location: LOCATION, + }, + fakeOptions, + { + defaultDataset: { + datasetId: ds.id, }, - fakeOptions, { - defaultDataset: { - datasetId: ds.id, - }, - }); + } + ); ds.bigQuery.createQueryJob = (options: {}, callback: Function) => { assert.deepStrictEqual(options, expectedOptions); assert.notStrictEqual(fakeOptions, options); - callback(); // the done fn + callback(); // the done fn }; ds.location = LOCATION; @@ -259,11 +284,13 @@ describe('BigQuery/Dataset', () => { }); it('should accept a query string', done => { - ds.bigQuery.createQueryJob = - (options: _root.Query, callback: Function) => { - assert.strictEqual(options.query, FAKE_QUERY); - callback(); // the done fn - }; + ds.bigQuery.createQueryJob = ( + options: _root.Query, + callback: Function + ) => { + assert.strictEqual(options.query, FAKE_QUERY); + callback(); // the done fn + }; ds.createQueryJob(FAKE_QUERY, done); }); @@ -305,7 +332,7 @@ describe('BigQuery/Dataset', () => { }); it('should pass along options', done => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any ds.bigQuery.createQueryStream = (opts: any) => { assert.strictEqual(opts.a, options.a); assert.strictEqual(opts.c, options.c); @@ -376,7 +403,9 @@ describe('BigQuery/Dataset', () => { assert.deepStrictEqual(body.schema, SCHEMA_OBJECT); assert.strictEqual(body.tableReference.datasetId, DATASET_ID); assert.strictEqual( - body.tableReference.projectId, ds.bigQuery.projectId); + body.tableReference.projectId, + ds.bigQuery.projectId + ); assert.strictEqual(body.tableReference.tableId, TABLE_ID); done(); @@ -400,7 +429,7 @@ describe('BigQuery/Dataset', () => { Table.formatMetadata_ = options => { assert.strictEqual(options, fakeOptions); - return formatted as {} as 
FormattedMetadata; + return (formatted as {}) as FormattedMetadata; }; ds.request = (reqOpts: DecorateRequestOptions) => { @@ -425,15 +454,19 @@ describe('BigQuery/Dataset', () => { it('should wrap an array schema', done => { ds.request = (reqOpts: DecorateRequestOptions) => { assert.deepStrictEqual( - reqOpts.json.schema.fields, SCHEMA_OBJECT.fields); + reqOpts.json.schema.fields, + SCHEMA_OBJECT.fields + ); done(); }; ds.createTable( - TABLE_ID, { - schema: SCHEMA_OBJECT.fields, - }, - assert.ifError); + TABLE_ID, + { + schema: SCHEMA_OBJECT.fields, + }, + assert.ifError + ); }); it('should assign record type to nested schemas', done => { @@ -448,12 +481,14 @@ describe('BigQuery/Dataset', () => { }; ds.createTable( - TABLE_ID, { - schema: { - fields: [{id: 'name', type: 'STRING'}, nestedField], - }, + TABLE_ID, + { + schema: { + fields: [{id: 'name', type: 'STRING'}, nestedField], }, - assert.ifError); + }, + assert.ifError + ); }); it('should return an error to the callback', done => { @@ -475,16 +510,18 @@ describe('BigQuery/Dataset', () => { }; ds.createTable( - TABLE_ID, {schema: SCHEMA_OBJECT}, - (err: Error, table: _root.Table) => { - assert.ifError(err); - assert(table instanceof Table); - done(); - }); + TABLE_ID, + {schema: SCHEMA_OBJECT}, + (err: Error, table: _root.Table) => { + assert.ifError(err); + assert(table instanceof Table); + done(); + } + ); }); it('should pass the location to the Table', done => { - const response = extend({location: LOCATION}, API_RESPONSE); + const response = Object.assign({location: LOCATION}, API_RESPONSE); ds.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(null, response); @@ -507,32 +544,38 @@ describe('BigQuery/Dataset', () => { }; ds.createTable( - TABLE_ID, opts, (err: Error, table: _root.Table, apiResponse: {}) => { - assert.ifError(err); - assert.strictEqual(apiResponse, API_RESPONSE); - done(); - }); + TABLE_ID, + opts, + (err: Error, table: _root.Table, apiResponse: {}) => { + assert.ifError(err); + assert.strictEqual(apiResponse, API_RESPONSE); + done(); + } + ); }); it('should assign metadata to the Table object', done => { - const apiResponse = extend( - { - a: 'b', - c: 'd', - }, - API_RESPONSE); + const apiResponse = Object.assign( + { + a: 'b', + c: 'd', + }, + API_RESPONSE + ); ds.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(null, apiResponse); }; ds.createTable( - TABLE_ID, {schema: SCHEMA_OBJECT}, - (err: Error, table: _root.Table) => { - assert.ifError(err); - assert.strictEqual(table.metadata, apiResponse); - done(); - }); + TABLE_ID, + {schema: SCHEMA_OBJECT}, + (err: Error, table: _root.Table) => { + assert.ifError(err); + assert.strictEqual(table.metadata, apiResponse); + done(); + } + ); }); }); @@ -592,6 +635,127 @@ describe('BigQuery/Dataset', () => { }); }); + describe('getModels', () => { + it('should get models from the api', done => { + ds.request = (reqOpts: DecorateRequestOptions) => { + assert.strictEqual(reqOpts.uri, '/models'); + assert.deepStrictEqual(reqOpts.qs, {}); + done(); + }; + + ds.getModels(assert.ifError); + }); + + it('should accept a query', done => { + const query = { + maxResults: 8, + pageToken: 'token', + }; + + ds.request = (reqOpts: DecorateRequestOptions) => { + assert.strictEqual(reqOpts.qs, query); + done(); + }; + + ds.getModels(query, assert.ifError); + }); + + it('should default the query value to an empty object', done => { + ds.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.qs, {}); + 
done(); + }; + + ds.getModels(assert.ifError); + }); + + it('should return error to callback', done => { + const error = new Error('Error.'); + + ds.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(error); + }; + + ds.getModels((err: Error) => { + assert.strictEqual(err, error); + done(); + }); + }); + + describe('success', () => { + const modelId = 'modelName'; + const apiResponse = { + models: [ + { + a: 'b', + c: 'd', + modelReference: {modelId}, + }, + ], + }; + + beforeEach(() => { + ds.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(null, apiResponse); + }; + }); + + it('should return Model & apiResponse', done => { + ds.getModels( + ( + err: Error, + models: _root.Model[], + nextQuery: {}, + apiResponse_: {} + ) => { + assert.ifError(err); + + const model = models[0]; + + assert(model instanceof _root.Model); + assert.strictEqual(model.id, modelId); + assert.strictEqual(apiResponse_, apiResponse); + done(); + } + ); + }); + + it('should assign metadata to the Model objects', done => { + ds.getModels((err: Error, models: _root.Model[]) => { + assert.ifError(err); + assert.strictEqual(models[0].metadata, apiResponse.models[0]); + done(); + }); + }); + + it('should return token if more results exist', done => { + const pageToken = 'token'; + + const query = { + maxResults: 5, + }; + + const expectedNextQuery = { + maxResults: 5, + pageToken, + }; + + ds.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(null, {nextPageToken: pageToken}); + }; + + ds.getModels( + query, + (err: Error, tables: _root.Model[], nextQuery: {}) => { + assert.ifError(err); + assert.deepStrictEqual(nextQuery, expectedNextQuery); + done(); + } + ); + }); + }); + }); + describe('getTables', () => { it('should get tables from the api', done => { ds.request = (reqOpts: DecorateRequestOptions) => { @@ -660,18 +824,23 @@ describe('BigQuery/Dataset', () => { it('should return Table & apiResponse', done => { ds.getTables( - (err: Error, tables: _root.Table[], nextQuery: {}, - apiResponse_: {}) => { - assert.ifError(err); - - const table = tables[0]; - - assert(table instanceof Table); - assert.strictEqual(table.id, tableId); - assert.strictEqual(table.location, LOCATION); - assert.strictEqual(apiResponse_, apiResponse); - done(); - }); + ( + err: Error, + tables: _root.Table[], + nextQuery: {}, + apiResponse_: {} + ) => { + assert.ifError(err); + + const table = tables[0]; + + assert(table instanceof Table); + assert.strictEqual(table.id, tableId); + assert.strictEqual(table.location, LOCATION); + assert.strictEqual(apiResponse_, apiResponse); + done(); + } + ); }); it('should assign metadata to the Table objects', done => { @@ -699,15 +868,31 @@ describe('BigQuery/Dataset', () => { }; ds.getTables( - query, (err: Error, tables: _root.Table[], nextQuery: {}) => { - assert.ifError(err); - assert.deepStrictEqual(nextQuery, expectedNextQuery); - done(); - }); + query, + (err: Error, tables: _root.Table[], nextQuery: {}) => { + assert.ifError(err); + assert.deepStrictEqual(nextQuery, expectedNextQuery); + done(); + } + ); }); }); }); + describe('model', () => { + it('should throw an error if the id is missing', () => { + const expectedErr = /A model ID is required\./; + assert.throws(() => ds.model(), expectedErr); + }); + + it('should return a Model object', () => { + const modelId = 'modelId'; + const model = ds.model(modelId); + assert(model instanceof _root.Model); + assert.strictEqual(model.id, modelId); + }); + }); + 
describe('query', () => { const options = { a: 'b', @@ -734,7 +919,7 @@ describe('BigQuery/Dataset', () => { }); it('should pass along options', done => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any ds.bigQuery.query = (opts: any) => { assert.strictEqual(opts.a, options.a); assert.strictEqual(opts.c, options.c); diff --git a/test/job.ts b/test/job.ts index b341e170..ab195ff8 100644 --- a/test/job.ts +++ b/test/job.ts @@ -1,43 +1,51 @@ -/** - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. import {DecorateRequestOptions, util} from '@google-cloud/common'; import * as pfy from '@google-cloud/promisify'; -import * as arrify from 'arrify'; +import arrify = require('arrify'); import * as assert from 'assert'; -import * as extend from 'extend'; +import {describe, it, beforeEach, afterEach, before} from 'mocha'; import * as proxyquire from 'proxyquire'; import * as sinon from 'sinon'; -import {BigQuery} from '../src'; +import {BigQuery} from '../src/bigquery'; import {QueryResultsOptions} from '../src/job'; class FakeOperation { - calledWith_: IArguments; + calledWith_: Array<{}>; interceptors: Array<{}>; id: {}; - constructor() { - this.calledWith_ = arguments; + constructor(...args: Array<{}>) { + this.calledWith_ = args; this.interceptors = []; - this.id = this.calledWith_[0].id; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.id = (this.calledWith_[0] as any).id; } } +interface CalledWithJob extends FakeOperation { + calledWith_: Array<{ + parent: {}; + baseUrl: string; + id: string; + methods: string[]; + }>; +} + let promisified = false; -const fakePfy = extend({}, pfy, { +const fakePfy = Object.assign({}, pfy, { promisifyAll: (c: Function) => { if (c.name === 'Job') { promisified = true; @@ -60,15 +68,13 @@ const fakePaginator = { streamify: (methodName: string) => { return methodName; }, - } + }, }; -let sandbox: sinon.SinonSandbox; -beforeEach(() => sandbox = sinon.createSandbox()); -afterEach(() => sandbox.restore()); +const sandbox = sinon.createSandbox(); describe('BigQuery/Job', () => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any const BIGQUERY: any = { projectId: 'my-project', Promise, @@ -76,23 +82,25 @@ describe('BigQuery/Job', () => { const JOB_ID = 'job_XYrk_3z'; const LOCATION = 
'asia-northeast1'; - // tslint:disable-next-line no-any variable-name + // eslint-disable-next-line @typescript-eslint/no-explicit-any let Job: any; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let job: any; before(() => { Job = proxyquire('../src/job.js', { - '@google-cloud/common': {Operation: FakeOperation}, - '@google-cloud/paginator': fakePaginator, - '@google-cloud/promisify': fakePfy, - }).Job; + '@google-cloud/common': {Operation: FakeOperation}, + '@google-cloud/paginator': fakePaginator, + '@google-cloud/promisify': fakePfy, + }).Job; }); beforeEach(() => { job = new Job(BIGQUERY, JOB_ID); }); + afterEach(() => sandbox.restore()); + describe('initialization', () => { it('should paginate all the things', () => { assert(extended); @@ -109,7 +117,7 @@ describe('BigQuery/Job', () => { it('should inherit from Operation', () => { assert(job instanceof FakeOperation); - const calledWith = job.calledWith_[0]; + const calledWith = (job as CalledWithJob).calledWith_[0]; assert.strictEqual(calledWith.parent, BIGQUERY); assert.strictEqual(calledWith.baseUrl, '/jobs'); @@ -142,6 +150,18 @@ describe('BigQuery/Job', () => { }, }); }); + + it('should update the location after initializing job object', () => { + const job = new Job(BIGQUERY, JOB_ID); + job.location = LOCATION; + const calledWith = job.calledWith_[0]; + + assert.deepStrictEqual(calledWith.methods.getMetadata, { + reqOpts: { + qs: {location: LOCATION}, + }, + }); + }); }); describe('cancel', () => { @@ -181,12 +201,18 @@ describe('BigQuery/Job', () => { }; beforeEach(() => { - BIGQUERY.request = - (reqOpts: DecorateRequestOptions, callback: Function) => { - callback(null, RESPONSE); - }; + BIGQUERY.request = ( + reqOpts: DecorateRequestOptions, + callback: Function + ) => { + callback(null, RESPONSE); + }; - BIGQUERY.mergeSchemaWithRows_ = (schema: {}, rows: {}) => { + BIGQUERY.mergeSchemaWithRows_ = ( + schema: {}, + rows: {}, + wrapIntegers: {} + ) => { return rows; }; }); @@ -202,7 +228,7 @@ describe('BigQuery/Job', () => { it('should optionally accept options', done => { const options = {a: 'b'}; - const expectedOptions = extend({location: undefined}, options); + const expectedOptions = Object.assign({location: undefined}, options); BIGQUERY.request = (reqOpts: DecorateRequestOptions) => { assert.deepStrictEqual(reqOpts.qs, expectedOptions); @@ -223,14 +249,28 @@ describe('BigQuery/Job', () => { job.getQueryResults(assert.ifError); }); + it('should delete any cached jobs', done => { + const options = {job: {}, a: 'b'}; + const expectedOptions = {location: undefined, a: 'b'}; + + BIGQUERY.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.qs, expectedOptions); + done(); + }; + + job.getQueryResults(options, assert.ifError); + }); + it('should return any errors to the callback', done => { const error = new Error('err'); const response = {}; - BIGQUERY.request = - (reqOpts: DecorateRequestOptions, callback: Function) => { - callback(error, response); - }; + BIGQUERY.request = ( + reqOpts: DecorateRequestOptions, + callback: Function + ) => { + callback(error, response); + }; job.getQueryResults((err: Error, rows: {}, nextQuery: {}, resp: {}) => { assert.strictEqual(err, error); @@ -258,17 +298,21 @@ describe('BigQuery/Job', () => { const mergedRows: Array<{}> = []; - BIGQUERY.request = - (reqOpts: DecorateRequestOptions, callback: Function) => { - callback(null, response); - }; + BIGQUERY.request = ( + reqOpts: DecorateRequestOptions, + 
callback: Function + ) => { + callback(null, response); + }; - sandbox.stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema, rows) => { - assert.strictEqual(schema, response.schema); - assert.strictEqual(rows, response.rows); - return mergedRows; - }); + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema, rows, wrapIntegers) => { + assert.strictEqual(schema, response.schema); + assert.strictEqual(rows, response.rows); + assert.strictEqual(wrapIntegers, false); + return mergedRows; + }); job.getQueryResults((err: Error, rows: {}) => { assert.ifError(err); @@ -277,13 +321,43 @@ }); }); + it('should wrap integers', done => { + const response = { + schema: {}, + rows: [], + }; + + const mergedRows: Array<{}> = []; + + const options = {wrapIntegers: true}; + const expectedOptions = Object.assign({location: undefined}); + + BIGQUERY.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.qs, expectedOptions); + done(); + }; + + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema, rows, wrapIntegers) => { + assert.strictEqual(schema, response.schema); + assert.strictEqual(rows, response.rows); + assert.strictEqual(wrapIntegers, true); + return mergedRows; + }); + + job.getQueryResults(options, assert.ifError); + }); + it('should return the query when the job is not complete', done => { - BIGQUERY.request = - (reqOpts: DecorateRequestOptions, callback: Function) => { - callback(null, { - jobComplete: false, - }); - }; + BIGQUERY.request = ( + reqOpts: DecorateRequestOptions, + callback: Function + ) => { + callback(null, { + jobComplete: false, + }); + }; job.getQueryResults(options, (err: Error, rows: {}, nextQuery: {}) => { assert.ifError(err); @@ -293,13 +367,41 @@ }); }); + it('should return an error when the job is not complete & timeout is overridden', done => { + const options = {job: {}, timeoutMs: 1000}; + const message = `The query did not complete before ${options.timeoutMs}ms`; + const response = { + jobComplete: false, + }; + + BIGQUERY.request = ( + reqOpts: DecorateRequestOptions, + callback: Function + ) => { + callback(null, response); + }; + + job.getQueryResults( + options, + (err: Error, rows: {}, nextQuery: {}, resp: {}) => { + assert.strictEqual(err.message, message); + assert.strictEqual(rows, null); + assert.deepStrictEqual(nextQuery, options); + assert.strictEqual(resp, response); + done(); + } + ); + }); + it('should populate nextQuery when more results exist', done => { job.getQueryResults( - options, (err: Error, rows: {}, nextQuery: QueryResultsOptions) => { - assert.ifError(err); - assert.strictEqual(nextQuery.pageToken, pageToken); - done(); - }); + options, + (err: Error, rows: {}, nextQuery: QueryResultsOptions) => { + assert.ifError(err); + assert.strictEqual(nextQuery.pageToken, pageToken); + done(); + } + ); }); }); @@ -313,15 +415,17 @@ it('should call getQueryResults correctly', done => { const options = {a: 'b', c: 'd'}; - job.getQueryResults = - (options_: QueryResultsOptions, callback: Function) => { - assert.deepStrictEqual(options_, { - a: 'b', - c: 'd', - autoPaginate: false, - }); - callback(); // done() - }; + job.getQueryResults = ( + options_: QueryResultsOptions, + callback: Function + ) => { + assert.deepStrictEqual(options_, { + a: 'b', + c: 'd', + autoPaginate: false, + }); + callback(); // done() + }; job.getQueryResultsAsStream_(options, done); }); @@ -357,7 +461,8 @@
describe('BigQuery/Job', () => { const error = new Error('Error.'); const apiResponse = { status: { - errors: error, + errorResult: error, + errors: [error], }, }; @@ -365,7 +470,7 @@ describe('BigQuery/Job', () => { beforeEach(() => { job.getMetadata = (callback: Function) => { - callback(null, apiResponse, apiResponse); + callback(null, apiResponse); }; }); diff --git a/test/mocha.opts b/test/mocha.opts deleted file mode 100644 index 48bf1c3d..00000000 --- a/test/mocha.opts +++ /dev/null @@ -1,4 +0,0 @@ ---require source-map-support/register ---require intelli-espower-loader ---timeout 10000 ---throw-deprecation diff --git a/test/model.ts b/test/model.ts new file mode 100644 index 00000000..73059b40 --- /dev/null +++ b/test/model.ts @@ -0,0 +1,385 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as assert from 'assert'; +import {describe, it, before, beforeEach, afterEach} from 'mocha'; +import * as sinon from 'sinon'; +import * as proxyquire from 'proxyquire'; +import * as pfy from '@google-cloud/promisify'; +import {EventEmitter} from 'events'; +import {JobOptions} from '../src/job'; +import {ServiceObject, ServiceObjectConfig, util} from '@google-cloud/common'; + +let promisified = false; + +const fakePfy = Object.assign({}, pfy, { + promisifyAll: (c: Function) => { + if (c.name === 'Model') { + promisified = true; + } + pfy.promisifyAll(c); + }, +}); + +class FakeServiceObject extends ServiceObject { + _calledWith: IArguments; + constructor(config: ServiceObjectConfig) { + super(config); + // eslint-disable-next-line prefer-rest-params + this._calledWith = arguments; + } +} + +let isCustomTypeOverride: Function | null; +const fakeUtil = Object.assign({}, util, { + isCustomType: (...args: Array<{}>) => { + return (isCustomTypeOverride || util.isCustomType)(...args); + }, + noop: () => {}, +}); + +const sandbox = sinon.createSandbox(); + +describe('BigQuery/Model', () => { + const MODEL_ID = 'my_model'; + + const DATASET = { + id: 'dataset-id', + createTable: util.noop, + bigQuery: { + projectId: 'project-id', + job: (id: string) => { + return {id}; + }, + apiEndpoint: 'bigquery.googleapis.com', + request: util.noop, + }, + }; + + before(() => { + Model = proxyquire('../src/model.js', { + '@google-cloud/common': { + ServiceObject: FakeServiceObject, + util: fakeUtil, + }, + '@google-cloud/promisify': fakePfy, + }).Model; + }); + + beforeEach(() => { + isCustomTypeOverride = null; + model = new Model(DATASET, MODEL_ID); + model.bigQuery.request = util.noop; + model.bigQuery.createJob = util.noop; + }); + + afterEach(() => sandbox.restore()); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let model: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let Model: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + + describe('instantiation', () => { + it('should promisify all the things', () => { + assert(promisified); + }); + + it('should inherit from ServiceObject', () => { + 
assert(model instanceof FakeServiceObject); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const [config] = (model as any)._calledWith; + assert.strictEqual(config.parent, DATASET); + assert.strictEqual(config.baseUrl, '/models'); + assert.strictEqual(config.id, MODEL_ID); + assert.deepStrictEqual(config.methods, { + delete: true, + exists: true, + get: true, + getMetadata: true, + setMetadata: true, + }); + }); + }); + + describe('createExtractJob', () => { + const URI = 'gs://bucket-name/model-export'; + + const FILE = { + name: 'model-export', + bucket: { + name: 'bucket-name', + }, + }; + + beforeEach(() => { + isCustomTypeOverride = () => { + return false; + }; + + model.bigQuery.job = sinon.stub(); + model.bigQuery.createJob = sinon.stub(); + }); + + it('should call createJob correctly', done => { + model.bigQuery.createJob = (reqOpts: JobOptions) => { + assert.deepStrictEqual(reqOpts.configuration!.extract!.sourceModel, { + datasetId: model.dataset.id, + projectId: model.bigQuery.projectId, + modelId: model.id, + }); + + done(); + }; + + model.createExtractJob(URI, assert.ifError); + }); + + it('should accept just a destination and a callback', done => { + model.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { + callback(null, {jobReference: {jobId: 'job-id'}}); + }; + + model.createExtractJob(URI, done); + }); + + describe('formats', () => { + it('should accept ML_TF_SAVED_MODEL', done => { + model.bigQuery.createJob = (reqOpts: JobOptions) => { + const extract = reqOpts.configuration!.extract!; + assert.strictEqual(extract.destinationFormat, 'ML_TF_SAVED_MODEL'); + done(); + }; + + model.createExtractJob( + URI, + {format: 'ml_tf_saved_model'}, + assert.ifError + ); + }); + + it('ML_XGBOOST_BOOSTER', done => { + model.bigQuery.createJob = (reqOpts: JobOptions) => { + const extract = reqOpts.configuration!.extract!; + assert.strictEqual(extract.destinationFormat, 'ML_XGBOOST_BOOSTER'); + done(); + }; + + model.createExtractJob( + URI, + {format: 'ml_xgboost_booster'}, + assert.ifError + ); + }); + + it('should parse out full gs:// urls from files', done => { + isCustomTypeOverride = () => { + return true; + }; + + model.bigQuery.createJob = (reqOpts: JobOptions) => { + assert.deepStrictEqual( + reqOpts.configuration!.extract!.destinationUris, + ['gs://' + FILE.bucket.name + '/' + FILE.name] + ); + done(); + }; + + model.createExtractJob(FILE, assert.ifError); + }); + + it('should check if a destination is a File', done => { + isCustomTypeOverride = (dest: {}, type: string) => { + assert.strictEqual(dest, FILE); + assert.strictEqual(type, 'storage/file'); + setImmediate(done); + return true; + }; + + model.createExtractJob(FILE, assert.ifError); + }); + + it('should throw if a destination is not a string or a File', () => { + isCustomTypeOverride = () => { + return false; + }; + + assert.throws(() => { + model.createExtractJob({}, util.noop); + }, /Destination must be a string or a File object/); + + assert.throws(() => { + model.createExtractJob([FILE, {}], util.noop); + }, /Destination must be a string or a File object/); + }); + + it('should throw if a provided format is not recognized', () => { + assert.throws(() => { + model.createExtractJob( + URI, + {format: 'interpretive_dance'}, + util.noop + ); + }, /Destination format not recognized/); + }); + + it('should accept a job prefix', done => { + const fakeJobPrefix = 'abc-'; + const options = { + jobPrefix: fakeJobPrefix, + }; + + model.bigQuery.createJob = ( + reqOpts: JobOptions, + 
callback: Function + ) => { + assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.extract as any).jobPrefix, + undefined + ); + callback(); // the done fn + }; + + model.createExtractJob(URI, options, done); + }); + + it('should accept a job id', done => { + const jobId = 'job-id'; + const options = {jobId}; + + model.bigQuery.createJob = ( + reqOpts: JobOptions, + callback: Function + ) => { + assert.strictEqual(reqOpts.jobId, jobId); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.extract as any).jobId, + undefined + ); + callback(); // the done fn + }; + + model.createExtractJob(URI, options, done); + }); + + it('should pass the callback to createJob', done => { + model.bigQuery.createJob = ( + reqOpts: JobOptions, + callback: Function + ) => { + assert.strictEqual(done, callback); + callback(); // the done fn + }; + + model.createExtractJob(URI, {}, done); + }); + + it('should optionally accept options', done => { + model.bigQuery.createJob = ( + reqOpts: JobOptions, + callback: Function + ) => { + assert.strictEqual(done, callback); + callback(); // the done fn + }; + + model.createExtractJob(URI, done); + }); + }); + }); + + describe('extract', () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let fakeJob: any; + + beforeEach(() => { + fakeJob = new EventEmitter(); + model.createExtractJob = ( + destination: {}, + metadata: {}, + callback: Function + ) => { + callback(null, fakeJob); + }; + }); + + it('should pass the arguments to createExtractJob', done => { + const fakeDestination = {}; + const fakeMetadata = {}; + + model.createExtractJob = (destination: {}, metadata: {}) => { + assert.strictEqual(destination, fakeDestination); + assert.strictEqual(metadata, fakeMetadata); + done(); + }; + + model.extract(fakeDestination, fakeMetadata, assert.ifError); + }); + + it('should optionally accept metadata', done => { + model.createExtractJob = (destination: {}, metadata: {}) => { + assert.deepStrictEqual(metadata, {}); + done(); + }; + + model.extract({}, assert.ifError); + }); + + it('should return any createExtractJob errors', done => { + const error = new Error('err'); + const response = {}; + + model.createExtractJob = ( + destination: {}, + metadata: {}, + callback: Function + ) => { + callback(error, null, response); + }; + + model.extract({}, (err: Error, resp: {}) => { + assert.strictEqual(err, error); + assert.strictEqual(resp, response); + done(); + }); + }); + + it('should return any job errors', done => { + const error = new Error('err'); + + model.extract({}, (err: Error) => { + assert.strictEqual(err, error); + done(); + }); + + fakeJob.emit('error', error); + }); + + it('should return the metadata on complete', done => { + const metadata = {}; + + model.extract({}, (err: Error, resp: {}) => { + assert.ifError(err); + assert.strictEqual(resp, metadata); + done(); + }); + + fakeJob.emit('complete', metadata); + }); + }); +}); diff --git a/test/routine.ts b/test/routine.ts new file mode 100644 index 00000000..7864ae3f --- /dev/null +++ b/test/routine.ts @@ -0,0 +1,159 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {ServiceObject, ServiceObjectConfig, util} from '@google-cloud/common'; +import * as pfy from '@google-cloud/promisify'; +import * as assert from 'assert'; +import {describe, it, before, beforeEach} from 'mocha'; +import * as extend from 'extend'; +import * as proxyquire from 'proxyquire'; + +import * as _root from '../src'; + +interface CalledWithRoutine extends ServiceObject { + calledWith_: Array<{ + parent: {}; + baseUrl: string; + id: string; + methods: string[]; + createMethod: Function; + }>; +} + +let promisified = false; +const fakePfy = Object.assign({}, pfy, { + promisifyAll: (c: Function, options: pfy.PromisifyAllOptions) => { + if (c.name === 'Routine') { + assert.strictEqual(typeof options, 'undefined'); + promisified = true; + } + }, +}); + +class FakeServiceObject extends ServiceObject { + calledWith_: IArguments; + constructor(config: ServiceObjectConfig) { + super(config); + // eslint-disable-next-line prefer-rest-params + this.calledWith_ = arguments; + } +} + +describe('BigQuery/Routine', () => { + const DATASET = ({ + id: 'kittens', + parent: {}, + createRoutine: util.noop, + } as {}) as _root.Dataset; + const ROUTINE_ID = 'my_routine'; + + // tslint:disable-next-line variable-name + let Routine: typeof _root.Routine; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let routine: any; + + before(() => { + Routine = proxyquire('../src/routine', { + '@google-cloud/common': { + ServiceObject: FakeServiceObject, + }, + '@google-cloud/promisify': fakePfy, + }).Routine; + }); + + beforeEach(() => { + routine = new Routine(DATASET, ROUTINE_ID); + }); + + describe('instantiation', () => { + it('should promisify all the things', () => { + assert(promisified); + }); + + it('should inherit from ServiceObject', () => { + assert(routine instanceof ServiceObject); + + const calledWith = (routine as CalledWithRoutine).calledWith_[0]; + + assert.strictEqual(calledWith.parent, DATASET); + assert.strictEqual(calledWith.baseUrl, '/routines'); + assert.strictEqual(calledWith.id, ROUTINE_ID); + assert.deepStrictEqual(calledWith.methods, { + create: true, + delete: true, + exists: true, + get: true, + getMetadata: true, + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }); + }); + + it('should configure create method', done => { + const config = {a: 'b'}; + + const dataset = extend(true, {}, DATASET, { + createRoutine: function(config_: {}, callback: Function) { + assert.strictEqual(this, dataset); + assert.deepStrictEqual(config_, config); + callback(); // done() + }, + }); + + const routine = new Routine(dataset, ROUTINE_ID); + const calledWith = (routine as CalledWithRoutine).calledWith_[0]; + + calledWith.createMethod(config, done); + }); + }); + + describe('setMetadata', () => { + it('should update the metadata', done => { + const currentMetadata = {a: 'b'}; + const newMetadata = {c: 'd'}; + const expectedMetadata = Object.assign({}, currentMetadata, newMetadata); + + routine.getMetadata = (callback: Function) => { + callback(null, currentMetadata); + }; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + 
(FakeServiceObject.prototype as any).setMetadata = function( + metadata: {}, + callback: Function + ) { + assert.strictEqual(this, routine); + assert.deepStrictEqual(metadata, expectedMetadata); + callback!(); // done() + }; + + routine.setMetadata(newMetadata, done); + }); + + it('should return an error if getting metadata fails', done => { + const error = new Error('Error.'); + routine.getMetadata = (callback: Function) => { + callback(error); + }; + + routine.setMetadata({}, (err: Error) => { + assert.strictEqual(err, error); + done(); + }); + }); + }); +}); diff --git a/test/table.ts b/test/table.ts index ebe2bc23..7c16b8ec 100644 --- a/test/table.ts +++ b/test/table.ts @@ -1,25 +1,29 @@ -/** - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {DecorateRequestOptions, ServiceObject, ServiceObjectConfig, util} from '@google-cloud/common'; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import { + DecorateRequestOptions, + ServiceObject, + ServiceObjectConfig, + util, +} from '@google-cloud/common'; import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; import * as pfy from '@google-cloud/promisify'; import {File} from '@google-cloud/storage'; -import * as arrify from 'arrify'; +import arrify = require('arrify'); import * as assert from 'assert'; +import {describe, it, afterEach, beforeEach, before, after} from 'mocha'; import Big from 'big.js'; import {EventEmitter} from 'events'; import * as extend from 'extend'; @@ -28,30 +32,64 @@ import * as sinon from 'sinon'; import * as stream from 'stream'; import * as uuid from 'uuid'; -import {BigQuery} from '../src'; +import {BigQuery, Query} from '../src/bigquery'; import {Job, JobOptions} from '../src/job'; -import {CopyTableMetadata, JobLoadMetadata, Table, TableOptions, ViewDefinition} from '../src/table'; +import { + CopyTableMetadata, + JobLoadMetadata, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + Table, + ViewDefinition, +} from '../src/table'; +import bigquery from '../src/types'; + +interface CalledWithTable extends ServiceObject { + calledWith_: Array<{ + parent: {}; + baseUrl: string; + id: string; + methods: string[]; + }>; +} let promisified = false; -let makeWritableStreamOverride: Function|null; -let isCustomTypeOverride: Function|null; -const fakeUtil = extend({}, util, { +let makeWritableStreamOverride: Function | null; +let isCustomTypeOverride: Function | null; +const fakeUtil = Object.assign({}, util, { isCustomType: (...args: Array<{}>) => { - return (isCustomTypeOverride || util.isCustomType).apply(null, args); + return (isCustomTypeOverride || util.isCustomType)(...args); }, makeWritableStream: (...args: Array<{}>) => { - (makeWritableStreamOverride || util.makeWritableStream).apply(null, args); + (makeWritableStreamOverride || util.makeWritableStream)(...args); }, - noop: () => {} + noop: () => {}, }); -const fakePfy = extend({}, pfy, { +const fakePfy = Object.assign({}, pfy, { promisifyAll: (c: Function) => { if (c.name === 'Table') { promisified = true; } + pfy.promisifyAll(c); }, }); +async function pReflect(promise: Promise) { + try { + const value = await promise; + return { + isFulfilled: true, + isRejected: false, + value, + }; + } catch (error) { + return { + isFulfilled: false, + isRejected: true, + reason: error, + }; + } +} + let extended = false; const fakePaginator = { paginator: { @@ -68,25 +106,69 @@ const fakePaginator = { streamify: (methodName: string) => { return methodName; }, - } + }, }; -// tslint:disable-next-line no-any -let fakeUuid: any = extend(true, {}, uuid); +let fakeUuid = extend(true, {}, uuid); class FakeServiceObject extends ServiceObject { calledWith_: IArguments; constructor(config: ServiceObjectConfig) { super(config); + // eslint-disable-next-line prefer-rest-params this.calledWith_ = arguments; } } -let sandbox: sinon.SinonSandbox; -beforeEach(() => sandbox = sinon.createSandbox()); -afterEach(() => sandbox.restore()); +interface MakeWritableStreamOptions { + metadata: bigquery.IJob; + request: {uri: string}; +} + +const sandbox = sinon.createSandbox(); describe('BigQuery/Table', () => { + before(() => { + Table = proxyquire('../src/table.js', { + uuid: fakeUuid, + '@google-cloud/common': { + ServiceObject: FakeServiceObject, + util: fakeUtil, + }, + '@google-cloud/paginator': fakePaginator, + '@google-cloud/promisify': fakePfy, + }).Table; + + const tableCached = extend(true, {}, Table); + + // Override all util methods, 
allowing them to be mocked. Overrides are + // removed before each test. + Object.keys(Table).forEach(tableMethod => { + if (typeof Table[tableMethod] !== 'function') { + return; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + Table[tableMethod] = (...args: any[]) => { + const method = tableOverrides[tableMethod] || tableCached[tableMethod]; + return method(...args); + }; + }); + }); + + beforeEach(() => { + fakeUuid = Object.assign(fakeUuid, uuid); + isCustomTypeOverride = null; + makeWritableStreamOverride = null; + tableOverrides = {}; + table = new Table(DATASET, TABLE_ID); + table.bigQuery.request = util.noop; + table.bigQuery.createJob = util.noop; + sandbox.stub(BigQuery, 'mergeSchemaWithRows_').returnsArg(1); + }); + + afterEach(() => sandbox.restore()); + const DATASET = { id: 'dataset-id', createTable: util.noop, @@ -95,6 +177,7 @@ describe('BigQuery/Table', () => { job: (id: string) => { return {id}; }, + apiEndpoint: 'bigquery.googleapis.com', request: util.noop, }, }; @@ -121,58 +204,17 @@ describe('BigQuery/Table', () => { const LOCATION = 'asia-northeast1'; - // tslint:disable-next-line no-any variable-name + // eslint-disable-next-line @typescript-eslint/no-explicit-any let Table: any; const TABLE_ID = 'kittens'; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let table: any; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let tableOverrides: any = {}; - before(() => { - Table = proxyquire('../src/table.js', { - uuid: fakeUuid, - '@google-cloud/common': { - ServiceObject: FakeServiceObject, - util: fakeUtil, - }, - '@google-cloud/paginator': fakePaginator, - '@google-cloud/promisify': fakePfy, - }).Table; - - const tableCached = extend(true, {}, Table); - - // Override all util methods, allowing them to be mocked. Overrides are - // removed before each test. 
- Object.keys(Table).forEach(tableMethod => { - if (typeof Table[tableMethod] !== 'function') { - return; - } - - // tslint:disable-next-line no-any - Table[tableMethod] = (...args: any[]) => { - const method = tableOverrides[tableMethod] || tableCached[tableMethod]; - return method(...args); - }; - }); - }); - - beforeEach(() => { - fakeUuid = extend(fakeUuid, uuid); - isCustomTypeOverride = null; - makeWritableStreamOverride = null; - tableOverrides = {}; - table = new Table(DATASET, TABLE_ID); - table.bigQuery.request = util.noop; - table.bigQuery.createJob = util.noop; - sandbox.stub(BigQuery, 'mergeSchemaWithRows_').callsFake((schema, rows) => { - return rows; - }); - }); - describe('instantiation', () => { it('should extend the correct methods', () => { - assert(extended); // See `fakePaginator.extend` + assert(extended); // See `fakePaginator.extend` }); it('should streamify the correct methods', () => { @@ -184,7 +226,7 @@ describe('BigQuery/Table', () => { }); it('should inherit from ServiceObject', done => { - const datasetInstance = extend({}, DATASET, { + const datasetInstance = Object.assign({}, DATASET, { createTable: { bind: (context: {}) => { assert.strictEqual(context, datasetInstance); @@ -196,7 +238,7 @@ describe('BigQuery/Table', () => { const table = new Table(datasetInstance, TABLE_ID); assert(table instanceof ServiceObject); - const calledWith = table.calledWith_[0]; + const calledWith = (table as CalledWithTable).calledWith_[0]; assert.strictEqual(calledWith.parent, datasetInstance); assert.strictEqual(calledWith.baseUrl, '/tables'); @@ -247,7 +289,7 @@ describe('BigQuery/Table', () => { }, }; - const expectedHeaders = extend({}, fakeReqOpts.headers, { + const expectedHeaders = Object.assign({}, fakeReqOpts.headers, { 'If-Match': FAKE_ETAG, }); @@ -274,7 +316,9 @@ describe('BigQuery/Table', () => { describe('createSchemaFromString_', () => { it('should create a schema object from a string', () => { assert.deepStrictEqual( - Table.createSchemaFromString_(SCHEMA_STRING), SCHEMA_OBJECT); + Table.createSchemaFromString_(SCHEMA_STRING), + SCHEMA_OBJECT + ); }); it('should trim names', () => { @@ -327,16 +371,24 @@ describe('BigQuery/Table', () => { this.value = value; } } + class BigQueryInt { + value: {}; + constructor(value: {}) { + this.value = value; + } + } const date = new BigQueryDate('date'); const datetime = new BigQueryDatetime('datetime'); const time = new BigQueryTime('time'); const timestamp = new BigQueryTimestamp('timestamp'); + const integer = new BigQueryInt('integer'); assert.strictEqual(Table.encodeValue_(date), 'date'); assert.strictEqual(Table.encodeValue_(datetime), 'datetime'); assert.strictEqual(Table.encodeValue_(time), 'time'); assert.strictEqual(Table.encodeValue_(timestamp), 'timestamp'); + assert.strictEqual(Table.encodeValue_(integer), 'integer'); }); it('should properly encode arrays', () => { @@ -372,13 +424,13 @@ describe('BigQuery/Table', () => { assert.strictEqual(Table.encodeValue_(new Big('123.456')), '123.456'); assert.strictEqual(Table.encodeValue_(new Big('-123.456')), '-123.456'); assert.strictEqual( - Table.encodeValue_( - new Big('99999999999999999999999999999.999999999')), - '99999999999999999999999999999.999999999'); + Table.encodeValue_(new Big('99999999999999999999999999999.999999999')), + '99999999999999999999999999999.999999999' + ); assert.strictEqual( - Table.encodeValue_( - new Big('-99999999999999999999999999999.999999999')), - '-99999999999999999999999999999.999999999'); + Table.encodeValue_(new 
Big('-99999999999999999999999999999.999999999')), + '-99999999999999999999999999999.999999999' + ); }); }); @@ -454,33 +506,35 @@ describe('BigQuery/Table', () => { assert.strictEqual(formatted.view.useLegacySql, false); }); - it('should allow the view option to be passed as a pre-formatted object', - () => { - const view: ViewDefinition = {query: 'abc', useLegacySql: false}; + it('should allow the view option to be passed as a pre-formatted object', () => { + const view: ViewDefinition = {query: 'abc', useLegacySql: false}; - const {view: formattedView} = Table.formatMetadata_({view}); + const {view: formattedView} = Table.formatMetadata_({view}); - assert.deepEqual(formattedView, view); - }); + assert.deepStrictEqual(formattedView, view); + }); }); describe('copy', () => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let fakeJob: any; beforeEach(() => { fakeJob = new EventEmitter(); - table.createCopyJob = - (destination: {}, metadata: {}, callback: Function) => { - callback(null, fakeJob); - }; + table.createCopyJob = ( + destination: {}, + metadata: {}, + callback: Function + ) => { + callback(null, fakeJob); + }; }); it('should pass the arguments to createCopyJob', done => { const fakeDestination = {}; const fakeMetadata: CopyTableMetadata = { createDisposition: 'CREATE_NEVER', - writeDisposition: 'WRITE_TRUNCATE' + writeDisposition: 'WRITE_TRUNCATE', }; table.createCopyJob = (destination: {}, metadata: {}) => { @@ -505,10 +559,13 @@ describe('BigQuery/Table', () => { const error = new Error('err'); const response = {}; - table.createCopyJob = - (destination: {}, metadata: {}, callback: Function) => { - callback(error, null, response); - }; + table.createCopyJob = ( + destination: {}, + metadata: {}, + callback: Function + ) => { + callback(error, null, response); + }; table.copy({}, (err: Error, resp: {}) => { assert.strictEqual(err, error); @@ -542,15 +599,18 @@ describe('BigQuery/Table', () => { }); describe('copyFrom', () => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let fakeJob: any; beforeEach(() => { fakeJob = new EventEmitter(); - table.createCopyFromJob = - (sourceTables: {}, metadata: {}, callback: Function) => { - callback(null, fakeJob); - }; + table.createCopyFromJob = ( + sourceTables: {}, + metadata: {}, + callback: Function + ) => { + callback(null, fakeJob); + }; }); it('should pass the arguments to createCopyFromJob', done => { @@ -579,10 +639,13 @@ describe('BigQuery/Table', () => { const error = new Error('err'); const response = {}; - table.createCopyFromJob = - (sourceTables: {}, metadata: {}, callback: Function) => { - callback(error, null, response); - }; + table.createCopyFromJob = ( + sourceTables: {}, + metadata: {}, + callback: Function + ) => { + callback(error, null, response); + }; table.copyFrom({}, (err: Error, resp: {}) => { assert.strictEqual(err, error); @@ -622,22 +685,25 @@ describe('BigQuery/Table', () => { DEST_TABLE = new Table(DATASET, 'destination-table'); }); - it('should throw if a destination is not a Table', () => { - assert.throws(() => { - table.createCopyJob(); - }, /Destination must be a Table/); + it('should throw if a destination is not a Table', async () => { + await assert.rejects( + async () => table.createCopyJob(), + /Destination must be a Table/ + ); - assert.throws(() => { - table.createCopyJob({}); - }, /Destination must be a Table/); + await assert.rejects( + async () => table.createCopyJob({}), + 
/Destination must be a Table/ + ); - assert.throws(() => { - table.createCopyJob(() => {}); - }, /Destination must be a Table/); + await assert.rejects( + async () => table.createCopyJob(() => {}), + /Destination must be a Table/ + ); }); it('should send correct request to the API', done => { - table.bigQuery.createJob = (reqOpts: DecorateRequestOptions) => { + table.bigQuery.createJob = (reqOpts: JobOptions) => { assert.deepStrictEqual(reqOpts, { configuration: { copy: { @@ -671,8 +737,12 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix); - assert.strictEqual(reqOpts.configuration.copy.jobPrefix, undefined); - callback(); // the done fn + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.copy as any).jobPrefix, + undefined + ); + callback(); // the done fn }; table.createCopyJob(DEST_TABLE, options, done); @@ -681,7 +751,7 @@ describe('BigQuery/Table', () => { it('should use the default location', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.location, LOCATION); - callback(); // the done fn + callback(); // the done fn }; table.location = LOCATION; @@ -694,8 +764,12 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.jobId, jobId); - assert.strictEqual(reqOpts.configuration.copy.jobId, undefined); - callback(); // the done fn + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.copy as any).jobId, + undefined + ); + callback(); // the done fn }; table.createCopyJob(DEST_TABLE, options, done); @@ -704,7 +778,7 @@ describe('BigQuery/Table', () => { it('should pass the callback to createJob', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(done, callback); - callback(); // the done fn + callback(); // the done fn }; table.createCopyJob(DEST_TABLE, {}, done); @@ -713,7 +787,7 @@ describe('BigQuery/Table', () => { it('should optionally accept metadata', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(done, callback); - callback(); // the done fn + callback(); // the done fn }; table.createCopyJob(DEST_TABLE, done); @@ -727,26 +801,30 @@ describe('BigQuery/Table', () => { SOURCE_TABLE = new Table(DATASET, 'source-table'); }); - it('should throw if a source is not a Table', () => { - assert.throws(() => { - table.createCopyFromJob(['table']); - }, /Source must be a Table/); + it('should throw if a source is not a Table', async () => { + await assert.rejects( + async () => table.createCopyFromJob(['table']), + /Source must be a Table/ + ); - assert.throws(() => { - table.createCopyFromJob([SOURCE_TABLE, 'table']); - }, /Source must be a Table/); + await assert.rejects( + async () => table.createCopyFromJob([SOURCE_TABLE, 'table']), + /Source must be a Table/ + ); - assert.throws(() => { - table.createCopyFromJob({}); - }, /Source must be a Table/); + await assert.rejects( + async () => table.createCopyFromJob({}), + /Source must be a Table/ + ); - assert.throws(() => { - table.createCopyFromJob(() => {}); - }, /Source must be a Table/); + await assert.rejects( + async () => table.createCopyFromJob(() => {}), + /Source must be a Table/ + ); }); it('should send correct request to the API', done => { - 
table.bigQuery.createJob = (reqOpts: DecorateRequestOptions) => { + table.bigQuery.createJob = (reqOpts: JobOptions) => { assert.deepStrictEqual(reqOpts, { configuration: { copy: { @@ -776,7 +854,7 @@ describe('BigQuery/Table', () => { it('should accept multiple source tables', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - assert.deepStrictEqual(reqOpts.configuration.copy.sourceTables, [ + assert.deepStrictEqual(reqOpts.configuration!.copy!.sourceTables, [ { datasetId: SOURCE_TABLE.dataset.id, projectId: SOURCE_TABLE.bigQuery.projectId, @@ -803,8 +881,12 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix); - assert.strictEqual(reqOpts.configuration.copy.jobPrefix, undefined); - callback(); // the done fn + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.copy as any).jobPrefix, + undefined + ); + callback(); // the done fn }; table.createCopyFromJob(SOURCE_TABLE, options, done); @@ -813,7 +895,7 @@ describe('BigQuery/Table', () => { it('should use the default location', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.location, LOCATION); - callback(); // the done fn + callback(); // the done fn }; table.location = LOCATION; @@ -826,8 +908,12 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.jobId, jobId); - assert.strictEqual(reqOpts.configuration.copy.jobId, undefined); - callback(); // the done fn + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.copy as any).jobId, + undefined + ); + callback(); // the done fn }; table.createCopyFromJob(SOURCE_TABLE, options, done); @@ -836,7 +922,7 @@ describe('BigQuery/Table', () => { it('should pass the callback to createJob', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(done, callback); - callback(); // the done fn + callback(); // the done fn }; table.createCopyFromJob(SOURCE_TABLE, {}, done); @@ -845,7 +931,7 @@ describe('BigQuery/Table', () => { it('should optionally accept options', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(done, callback); - callback(); // the done fn + callback(); // the done fn }; table.createCopyFromJob(SOURCE_TABLE, done); @@ -874,7 +960,7 @@ describe('BigQuery/Table', () => { it('should call createJob correctly', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - assert.deepStrictEqual(reqOpts.configuration.extract.sourceTable, { + assert.deepStrictEqual(reqOpts.configuration!.extract!.sourceTable, { datasetId: table.dataset.id, projectId: table.bigQuery.projectId, tableId: table.id, @@ -897,7 +983,7 @@ describe('BigQuery/Table', () => { describe('formats', () => { it('should accept csv', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const extract = reqOpts.configuration.extract; + const extract = reqOpts.configuration!.extract!; assert.strictEqual(extract.destinationFormat, 'CSV'); done(); }; @@ -907,9 +993,11 @@ describe('BigQuery/Table', () => { it('should accept json', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const extract = reqOpts.configuration.extract; + const extract = reqOpts.configuration!.extract!; assert.strictEqual( - 
extract.destinationFormat, 'NEWLINE_DELIMITED_JSON'); + extract.destinationFormat, + 'NEWLINE_DELIMITED_JSON' + ); done(); }; @@ -918,7 +1006,7 @@ describe('BigQuery/Table', () => { it('should accept avro', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const extract = reqOpts.configuration.extract; + const extract = reqOpts.configuration!.extract!; assert.strictEqual(extract.destinationFormat, 'AVRO'); done(); }; @@ -928,7 +1016,7 @@ describe('BigQuery/Table', () => { it('should accept orc', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const extract = reqOpts.configuration.extract; + const extract = reqOpts.configuration!.extract!; assert.strictEqual(extract.destinationFormat, 'ORC'); done(); }; @@ -938,7 +1026,7 @@ describe('BigQuery/Table', () => { it('should accept parquet', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const extract = reqOpts.configuration.extract; + const extract = reqOpts.configuration!.extract!; assert.strictEqual(extract.destinationFormat, 'PARQUET'); done(); }; @@ -949,9 +1037,10 @@ describe('BigQuery/Table', () => { it('should parse out full gs:// urls from files', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - assert.deepStrictEqual(reqOpts.configuration.extract.destinationUris, [ - 'gs://' + FILE.bucket.name + '/' + FILE.name, - ]); + assert.deepStrictEqual( + reqOpts.configuration!.extract!.destinationUris, + ['gs://' + FILE.bucket.name + '/' + FILE.name] + ); done(); }; @@ -985,7 +1074,7 @@ describe('BigQuery/Table', () => { it('should detect file format if a format is not provided', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const destFormat = reqOpts.configuration.extract.destinationFormat; + const destFormat = reqOpts.configuration!.extract!.destinationFormat; assert.strictEqual(destFormat, 'NEWLINE_DELIMITED_JSON'); done(); }; @@ -995,9 +1084,10 @@ describe('BigQuery/Table', () => { it('should assign the provided format if matched', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - const extract = reqOpts.configuration.extract; + const extract = reqOpts.configuration!.extract!; assert.strictEqual(extract.destinationFormat, 'CSV'); - assert.strictEqual(extract.format, undefined); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + assert.strictEqual((extract as any).format, undefined); done(); }; @@ -1012,8 +1102,12 @@ describe('BigQuery/Table', () => { it('should assign GZIP compression with gzip: true', done => { table.bigQuery.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.configuration.extract.compression, 'GZIP'); - assert.strictEqual(reqOpts.configuration.extract.gzip, undefined); + assert.strictEqual(reqOpts.configuration!.extract!.compression, 'GZIP'); + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.extract as any).gzip, + undefined + ); done(); }; @@ -1028,8 +1122,12 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix); - assert.strictEqual(reqOpts.configuration.extract.jobPrefix, undefined); - callback(); // the done fn + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.extract as any).jobPrefix, + undefined + ); + callback(); // the done fn }; table.createExtractJob(FILE, options, done); @@ -1040,7 +1138,7 @@ describe('BigQuery/Table', () => { 
table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.location, LOCATION); - callback(); // the done fn + callback(); // the done fn }; table.createExtractJob(FILE, done); @@ -1052,8 +1150,12 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(reqOpts.jobId, jobId); - assert.strictEqual(reqOpts.configuration.extract.jobId, undefined); - callback(); // the done fn + assert.strictEqual( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (reqOpts.configuration!.extract as any).jobId, + undefined + ); + callback(); // the done fn }; table.createExtractJob(FILE, options, done); @@ -1062,7 +1164,7 @@ describe('BigQuery/Table', () => { it('should pass the callback to createJob', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(done, callback); - callback(); // the done fn + callback(); // the done fn }; table.createExtractJob(FILE, {}, done); @@ -1071,7 +1173,7 @@ describe('BigQuery/Table', () => { it('should optionally accept options', done => { table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { assert.strictEqual(done, callback); - callback(); // the done fn + callback(); // the done fn }; table.createExtractJob(FILE, done); @@ -1092,12 +1194,21 @@ describe('BigQuery/Table', () => { metadata: {}, }; + let bqCreateJobStub: sinon.SinonStub; + beforeEach(() => { + bqCreateJobStub = sinon + .stub(table.bigQuery, 'createJob') + .resolves([JOB, JOB.metadata]); isCustomTypeOverride = () => { return true; }; }); + afterEach(() => { + bqCreateJobStub.restore(); + }); + it('should accept just a File and a callback', done => { table.createWriteStream_ = () => { const ws = new stream.Writable(); @@ -1116,11 +1227,6 @@ describe('BigQuery/Table', () => { }); }); - it('should return a stream when a string is given', () => { - sandbox.stub(table, 'createWriteStream_').returns(new stream.Writable()); - assert(table.createLoadJob(FILEPATH) instanceof stream.Stream); - }); - it('should infer the file format from the given filepath', done => { table.createWriteStream_ = (metadata: JobLoadMetadata) => { assert.strictEqual(metadata.sourceFormat, 'NEWLINE_DELIMITED_JSON'); @@ -1179,138 +1285,147 @@ describe('BigQuery/Table', () => { table.createLoadJob(FILE, assert.ifError); }); - it('should throw if a File object is not provided', () => { + it('should throw if a File object is not provided', async () => { isCustomTypeOverride = () => { return false; }; - - assert.throws(() => { - table.createLoadJob({}); - }, /Source must be a File object/); + await assert.rejects( + async () => table.createLoadJob({}), + /Source must be a File object/ + ); }); - it('should convert File objects to gs:// urls', done => { - table.bigQuery.createJob = (reqOpts: JobOptions) => { - const sourceUri = reqOpts.configuration.load.sourceUris[0]; - assert.strictEqual( - sourceUri, 'gs://' + FILE.bucket.name + '/' + FILE.name); - done(); - }; - - table.createLoadJob(FILE, assert.ifError); + it('should convert File objects to gs:// urls', async () => { + await table.createLoadJob(FILE); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + configuration: { + load: { + sourceUris: ['gs://' + FILE.bucket.name + '/' + FILE.name], + }, + }, + }) + ); }); - it('should infer the file format from a File object', done => { - table.bigQuery.createJob = (reqOpts: JobOptions) => { - const sourceFormat = 
reqOpts.configuration.load.sourceFormat; - assert.strictEqual(sourceFormat, 'NEWLINE_DELIMITED_JSON'); - done(); - }; - - table.createLoadJob(FILE, assert.ifError); + it('should infer the file format from a File object', async () => { + await table.createLoadJob(FILE); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + configuration: { + load: { + sourceFormat: 'NEWLINE_DELIMITED_JSON', + }, + }, + }) + ); }); - it('should not override a provided format with a File', done => { - table.bigQuery.createJob = (reqOpts: JobOptions) => { - const sourceFormat = reqOpts.configuration.load.sourceFormat; - assert.strictEqual(sourceFormat, 'NEWLINE_DELIMITED_JSON'); - done(); - }; - - table.createLoadJob( - FILE, { - sourceFormat: 'NEWLINE_DELIMITED_JSON', + it('should not override a provided format with a File', async () => { + await table.createLoadJob(FILE, {sourceFormat: 'AVRO'}); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + configuration: { + load: { + sourceFormat: 'AVRO', + }, }, - assert.ifError); + }) + ); }); - it('should pass the callback to createJob', done => { - table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { - assert.strictEqual(done, callback); - callback(); // the done fn - }; - - table.createLoadJob(FILE, {}, done); + it('should use bigQuery.createJob', async () => { + await table.createLoadJob(FILE, {}); + assert(bqCreateJobStub.calledOnce); }); - it('should optionally accept options', done => { - table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { - assert.strictEqual(done, callback); - callback(); // the done fn - }; - - table.createLoadJob(FILE, done); + it('should optionally accept options', async () => { + await table.createLoadJob(FILE); + assert(bqCreateJobStub.calledOnce); }); - it('should set the job prefix', done => { - const fakeJobPrefix = 'abc'; - - table.bigQuery.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix); - assert.strictEqual(reqOpts.configuration.load.jobPrefix, undefined); - done(); - }; - - table.createLoadJob( - FILE, { - jobPrefix: fakeJobPrefix, + it('should set the job prefix', async () => { + const jobPrefix = 'abc'; + await table.createLoadJob(FILE, {jobPrefix}); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + jobPrefix, + configuration: { + load: { + jobPrefix: undefined, + }, }, - assert.ifError); + }) + ); }); - it('should use the default location', done => { + it('should use the default location', async () => { const table = new Table(DATASET, TABLE_ID, {location: LOCATION}); - - table.bigQuery.createJob = (reqOpts: JobOptions, callback: Function) => { - assert.strictEqual(reqOpts.location, LOCATION); - callback(); // the done fn - }; - - table.createLoadJob(FILE, done); + await table.createLoadJob(FILE); + assert(bqCreateJobStub.calledWithMatch({location: LOCATION})); }); - it('should accept a job id', done => { + it('should accept a job id', async () => { const jobId = 'job-id'; - const options = {jobId}; - - table.bigQuery.createJob = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.jobId, jobId); - assert.strictEqual(reqOpts.configuration.load.jobId, undefined); - done(); - }; - - table.createLoadJob(FILE, options, assert.ifError); + await table.createLoadJob(FILE, {jobId}); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + jobId, + configuration: { + load: { + jobId: undefined, + }, + }, + }) + ); 
}); describe('formats', () => { - it('should accept csv', done => { - table.bigQuery.createJob = (reqOpts: JobOptions) => { - const load = reqOpts.configuration.load; - assert.strictEqual(load.sourceFormat, 'CSV'); - done(); - }; - - table.createLoadJob(FILE, {format: 'csv'}, assert.ifError); + it('should accept csv', async () => { + await table.createLoadJob(FILE, {format: 'csv'}); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + configuration: { + load: { + sourceFormat: 'CSV', + }, + }, + }) + ); }); - it('should accept json', done => { - table.bigQuery.createJob = (reqOpts: JobOptions) => { - const load = reqOpts.configuration.load; - assert.strictEqual(load.sourceFormat, 'NEWLINE_DELIMITED_JSON'); - done(); - }; - - table.createLoadJob(FILE, {format: 'json'}, assert.ifError); + it('should accept json', async () => { + await table.createLoadJob(FILE, {format: 'json'}); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + configuration: { + load: { + sourceFormat: 'NEWLINE_DELIMITED_JSON', + }, + }, + }) + ); }); - it('should accept avro', done => { - table.bigQuery.createJob = (reqOpts: JobOptions) => { - const load = reqOpts.configuration.load; - assert.strictEqual(load.sourceFormat, 'AVRO'); - done(); - }; - - table.createLoadJob(FILE, {format: 'avro'}, assert.ifError); + it('should accept avro', async () => { + await table.createLoadJob(FILE, {format: 'avro'}); + assert(bqCreateJobStub.calledOnce); + assert( + bqCreateJobStub.calledWithMatch({ + configuration: { + load: { + sourceFormat: 'AVRO', + }, + }, + }) + ); }); }); }); @@ -1320,13 +1435,12 @@ describe('BigQuery/Table', () => { const fakeOptions = {}; const fakeReturnValue = {}; - table.dataset.createQueryJob = - (options: JobOptions, callback: Function) => { - assert.strictEqual(options, fakeOptions); - // tslint:disable-next-line - setImmediate(callback as any); - return fakeReturnValue; - }; + table.dataset.createQueryJob = (options: Query, callback: Function) => { + assert.strictEqual(options, fakeOptions); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + setImmediate(callback as any); + return fakeReturnValue; + }; const returnVal = table.createQueryJob(fakeOptions, done); assert.strictEqual(returnVal, fakeReturnValue); @@ -1356,34 +1470,40 @@ describe('BigQuery/Table', () => { describe('createWriteStream_', () => { describe('formats', () => { it('should accept csv', done => { - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const load = options.metadata.configuration.load; - assert.strictEqual(load.sourceFormat, 'CSV'); - done(); - }; + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const load = options.metadata.configuration!.load!; + assert.strictEqual(load.sourceFormat, 'CSV'); + done(); + }; table.createWriteStream_('csv').emit('writing'); }); it('should accept json', done => { - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const load = options.metadata.configuration.load; - assert.strictEqual(load.sourceFormat, 'NEWLINE_DELIMITED_JSON'); - done(); - }; + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const load = options.metadata.configuration!.load!; + assert.strictEqual(load.sourceFormat, 'NEWLINE_DELIMITED_JSON'); + done(); + }; table.createWriteStream_('json').emit('writing'); }); it('should accept avro', done => { - 
makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const load = options.metadata.configuration.load; - assert.strictEqual(load.sourceFormat, 'AVRO'); - done(); - }; + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const load = options.metadata.configuration!.load!; + assert.strictEqual(load.sourceFormat, 'AVRO'); + done(); + }; table.createWriteStream_('avro').emit('writing'); }); @@ -1396,31 +1516,46 @@ describe('BigQuery/Table', () => { return expectedSchema; }; - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const load = options.metadata.configuration.load; - assert.deepStrictEqual(load.schema, expectedSchema); - done(); - }; + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const load = options.metadata.configuration!.load!; + assert.deepStrictEqual(load.schema, expectedSchema); + done(); + }; table.createWriteStream_({schema: SCHEMA_STRING}).emit('writing'); }); - it('should throw if a given source format is not recognized', () => { - assert.throws(() => { - table.createWriteStream_('zip'); - }, /Source format not recognized/); - - assert.throws(() => { - table.createWriteStream_({ - sourceFormat: 'zip', - }); - }, /Source format not recognized/); + it('should override destination table', done => { + const expectedMetadata = { + destinationTable: { + projectId: 'projectId-override', + datasetId: 'datasetId-override', + tableId: 'tableId-override', + }, + }; + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + assert.deepStrictEqual( + options.metadata.configuration?.load?.destinationTable, + expectedMetadata.destinationTable + ); + done(); + }; - assert.doesNotThrow(() => { - table.createWriteStream_(); - table.createWriteStream_({}); - }); + table + .createWriteStream_({ + destinationTable: { + projectId: 'projectId-override', + datasetId: 'datasetId-override', + tableId: 'tableId-override', + }, + }) + .emit('writing'); }); it('should return a stream', () => { @@ -1428,67 +1563,66 @@ describe('BigQuery/Table', () => { }); describe('writable stream', () => { - // tslint:disable-next-line no-any - let fakeJob: any; + let fakeJob: EventEmitter; let fakeJobId: string; beforeEach(() => { fakeJob = new EventEmitter(); fakeJobId = uuid.v4(); - - fakeUuid.v4 = () => { - return fakeJobId; - }; + sandbox.stub(fakeUuid, 'v4').returns(fakeJobId); }); it('should make a writable stream when written to', done => { - let stream: stream.Writable; - makeWritableStreamOverride = (s: {}) => { assert.strictEqual(s, stream); done(); }; - - stream = table.createWriteStream_(); + const stream = table.createWriteStream_(); stream.emit('writing'); }); it('should pass extended metadata', done => { - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - assert.deepStrictEqual(options.metadata, { - configuration: { - load: { - a: 'b', - c: 'd', - destinationTable: { - projectId: table.bigQuery.projectId, - datasetId: table.dataset.id, - tableId: table.id, - }, - }, - }, - jobReference: { + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + assert.deepStrictEqual(options.metadata, { + configuration: { + load: { + a: 'b', + c: 'd', + destinationTable: { projectId: table.bigQuery.projectId, - jobId: fakeJobId, - location: undefined, + datasetId: table.dataset.id, + tableId: table.id, }, - }); - done(); 
- }; + }, + }, + jobReference: { + projectId: table.bigQuery.projectId, + jobId: fakeJobId, + location: undefined, + }, + }); + done(); + }; table.createWriteStream_({a: 'b', c: 'd'}).emit('writing'); }); it('should pass the correct request uri', done => { - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const uri = - 'https://www.googleapis.com/upload/bigquery/v2/projects/' + - table.bigQuery.projectId + '/jobs'; - assert.strictEqual(options.request.uri, uri); - done(); - }; + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const uri = + table.bigQuery.apiEndpoint + + '/upload/bigquery/v2/projects/' + + table.bigQuery.projectId + + '/jobs'; + assert.strictEqual(options.request.uri, uri); + done(); + }; table.createWriteStream_().emit('writing'); }); @@ -1497,16 +1631,19 @@ describe('BigQuery/Table', () => { const jobPrefix = 'abc-'; const expectedJobId = jobPrefix + fakeJobId; - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const jobId = options.metadata.jobReference.jobId; - assert.strictEqual(jobId, expectedJobId); + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const jobId = options.metadata.jobReference!.jobId; + assert.strictEqual(jobId, expectedJobId); - const config = options.metadata.configuration.load; - assert.strictEqual(config.jobPrefix, undefined); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const config = options.metadata.configuration!.load as any; + assert.strictEqual(config.jobPrefix, undefined); - done(); - }; + done(); + }; table.createWriteStream_({jobPrefix}).emit('writing'); }); @@ -1514,13 +1651,15 @@ describe('BigQuery/Table', () => { it('should use the default location', done => { const table = new Table(DATASET, TABLE_ID, {location: LOCATION}); - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const location = options.metadata.jobReference.location; - assert.strictEqual(location, LOCATION); + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const location = options.metadata.jobReference!.location; + assert.strictEqual(location, LOCATION); - done(); - }; + done(); + }; table.createWriteStream_().emit('writing'); }); @@ -1529,16 +1668,19 @@ describe('BigQuery/Table', () => { const jobId = 'job-id'; const options = {jobId}; - makeWritableStreamOverride = - (stream: stream.Stream, options: JobOptions) => { - const jobReference = options.metadata.jobReference; - assert.strictEqual(jobReference.jobId, jobId); + makeWritableStreamOverride = ( + stream: stream.Stream, + options: MakeWritableStreamOptions + ) => { + const jobReference = options.metadata.jobReference!; + assert.strictEqual(jobReference.jobId, jobId); - const config = options.metadata.configuration.load; - assert.strictEqual(config.jobId, undefined); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const config = options.metadata.configuration!.load as any; + assert.strictEqual(config.jobId, undefined); - done(); - }; + done(); + }; table.createWriteStream_(options).emit('writing'); }); @@ -1554,33 +1696,36 @@ describe('BigQuery/Table', () => { }; table.bigQuery.job = (id: string, options: {}) => { - assert.strictEqual(id, metadata.jobReference.jobId); + assert.strictEqual(id, metadata.jobReference!.jobId); assert.deepStrictEqual(options, { - location: metadata.jobReference.location, + location: 
metadata.jobReference!.location, }); return fakeJob; }; - makeWritableStreamOverride = - (stream: {}, options: {}, callback: Function) => { - callback(metadata); - }; + makeWritableStreamOverride = ( + stream: {}, + options: {}, + callback: Function + ) => { + callback(metadata); + }; - table.createWriteStream_() - .on('job', - (job: Job) => { - assert.strictEqual(job, fakeJob); - assert.deepStrictEqual(job.metadata, metadata); - done(); - }) - .emit('writing'); + table + .createWriteStream_() + .on('job', (job: Job) => { + assert.strictEqual(job, fakeJob); + assert.deepStrictEqual(job.metadata, metadata); + done(); + }) + .emit('writing'); }); }); }); describe('createWriteStream', () => { let fakeJob: EventEmitter; - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let fakeStream: any; beforeEach(() => { @@ -1649,15 +1794,18 @@ describe('BigQuery/Table', () => { }); describe('extract', () => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let fakeJob: any; beforeEach(() => { fakeJob = new EventEmitter(); - table.createExtractJob = - (destination: {}, metadata: {}, callback: Function) => { - callback(null, fakeJob); - }; + table.createExtractJob = ( + destination: {}, + metadata: {}, + callback: Function + ) => { + callback(null, fakeJob); + }; }); it('should pass the arguments to createExtractJob', done => { @@ -1686,10 +1834,13 @@ describe('BigQuery/Table', () => { const error = new Error('err'); const response = {}; - table.createExtractJob = - (destination: {}, metadata: {}, callback: Function) => { - callback(error, null, response); - }; + table.createExtractJob = ( + destination: {}, + metadata: {}, + callback: Function + ) => { + callback(error, null, response); + }; table.extract({}, (err: Error, resp: {}) => { assert.strictEqual(err, error); @@ -1724,7 +1875,7 @@ describe('BigQuery/Table', () => { describe('getRows', () => { it('should accept just a callback', done => { - table.request = (reqOpts: JobOptions, callback: Function) => { + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(null, {}); }; table.getRows(done); @@ -1733,7 +1884,7 @@ describe('BigQuery/Table', () => { it('should make correct API request', done => { const options = {a: 'b', c: 'd'}; - table.request = (reqOpts: JobOptions, callback: Function) => { + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { assert.strictEqual(reqOpts.uri, '/data'); assert.strictEqual(reqOpts.qs, options); callback(null, {}); @@ -1746,7 +1897,7 @@ describe('BigQuery/Table', () => { const apiResponse = {}; const error = new Error('Error.'); - table.request = (reqOpts: JobOptions, callback: Function) => { + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(error, apiResponse); }; @@ -1764,23 +1915,26 @@ describe('BigQuery/Table', () => { // Using "Stephen" so you know who to blame for these tests. const rows = [{f: [{v: 'stephen'}]}]; const schema = {fields: [{name: 'name', type: 'string'}]}; + const wrapIntegers = false; const mergedRows = [{name: 'stephen'}]; beforeEach(() => { - // tslint:disable-next-line no-any - table.request = (reqOpts: JobOptions, callback: any) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + table.request = (reqOpts: DecorateRequestOptions, callback: any) => { // Respond with a row, so it grabs the schema. // Use setImmediate to let our getMetadata overwrite process. 
setImmediate(callback, null, {rows}); }; sandbox.restore(); - sandbox.stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema_, rows_) => { - assert.strictEqual(schema_, schema); - assert.strictEqual(rows_, rows); - return mergedRows; - }); + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema_, rows_, wrapIntegers_) => { + assert.strictEqual(schema_, schema); + assert.strictEqual(rows_, rows); + assert.strictEqual(wrapIntegers_, wrapIntegers); + return mergedRows; + }); }); it('should refresh', done => { @@ -1815,7 +1969,11 @@ describe('BigQuery/Table', () => { // Step 3: execute original complete handler with schema-merged rows. function responseHandler( - err: Error, rows: {}, nextQuery: {}, apiResponse_: {}) { + err: Error, + rows: {}, + nextQuery: {}, + apiResponse_: {} + ) { assert.strictEqual(err, error); assert.strictEqual(rows, null); assert.strictEqual(nextQuery, null); @@ -1828,21 +1986,24 @@ describe('BigQuery/Table', () => { it('should return schema-merged rows', done => { const rows = [{f: [{v: 'stephen'}]}]; const schema = {fields: [{name: 'name', type: 'string'}]}; + const wrapIntegers = false; const merged = [{name: 'stephen'}]; table.metadata = {schema}; - table.request = (reqOpts: JobOptions, callback: Function) => { + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(null, {rows}); }; sandbox.restore(); - sandbox.stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema_, rows_) => { - assert.strictEqual(schema_, schema); - assert.strictEqual(rows_, rows); - return merged; - }); + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema_, rows_, wrapIntegers_) => { + assert.strictEqual(schema_, schema); + assert.strictEqual(rows_, rows); + assert.strictEqual(wrapIntegers_, wrapIntegers); + return merged; + }); table.getRows((err: Error, rows: {}) => { assert.ifError(err); @@ -1856,7 +2017,7 @@ describe('BigQuery/Table', () => { const schema = {fields: [{name: 'name', type: 'string'}]}; table.metadata = {schema}; - table.request = (reqOpts: JobOptions, callback: Function) => { + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(null, {rows}); }; @@ -1874,7 +2035,7 @@ describe('BigQuery/Table', () => { // Set a schema so it doesn't try to refresh the metadata. 
table.metadata = {schema: {}}; - table.request = (reqOpts: JobOptions, callback: Function) => { + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { callback(null, {pageToken}); }; @@ -1890,17 +2051,144 @@ describe('BigQuery/Table', () => { done(); }); }); - }); - describe('insert', () => { - const fakeInsertId = 'fake-insert-id'; + it('should return selected fields', done => { + const selectedFields = 'age'; + const rows = [{f: [{v: 40}]}]; + const schema = { + fields: [ + {name: 'name', type: 'string'}, + {name: 'age', type: 'INTEGER'}, + ], + }; + const result = [{age: 40}]; - const data = [ - {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, - {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, - {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, - {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, - {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, + table.metadata = {schema}; + + sandbox.restore(); + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(null, {rows}); + }; + + table.getRows({selectedFields}, (err: Error, rows: {}) => { + assert.ifError(err); + assert.deepStrictEqual(rows, result); + done(); + }); + }); + + it('should return selected fields from nested objects', done => { + const selectedFields = 'objects.nested_object.nested_property_1'; + const rows = [ + { + f: [ + { + v: [ + { + v: { + f: [ + { + v: { + f: [ + { + v: 'nested_property_1_value', + }, + ], + }, + }, + ], + }, + }, + ], + }, + ], + }, + ]; + const schema = { + fields: [ + {name: 'name', type: 'string'}, + { + name: 'objects', + type: 'RECORD', + mode: 'REPEATED', + fields: [ + { + name: 'nested_object', + type: 'RECORD', + fields: [ + { + name: 'nested_property', + type: 'STRING', + }, + { + name: 'nested_property_1', + type: 'STRING', + }, + ], + }, + ], + }, + ], + }; + const result = [ + { + objects: [ + { + nested_object: { + nested_property_1: 'nested_property_1_value', + }, + }, + ], + }, + ]; + + table.metadata = {schema}; + + sandbox.restore(); + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(null, {rows}); + }; + + table.getRows({selectedFields}, (err: Error, rows: {}) => { + assert.ifError(err); + assert.deepStrictEqual(rows, result); + done(); + }); + }); + + it('should wrap integers', done => { + const wrapIntegers = {integerTypeCastFunction: sinon.stub()}; + const options = {wrapIntegers}; + const merged = [{name: 'stephen'}]; + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + assert.deepStrictEqual(reqOpts.qs, {}); + callback(null, {}); + }; + + sandbox.restore(); + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema_, rows_, wrapIntegers_) => { + assert.strictEqual(wrapIntegers_, wrapIntegers); + return merged; + }); + + table.getRows(options, done); + }); + }); + + describe('insert', () => { + const fakeInsertId = 'fake-insert-id'; + + const data = [ + {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, + {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, + {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, + {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, + {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'}, ]; const rawData = [ @@ -1920,44 +2208,106 @@ describe('BigQuery/Table', () => { }), }; + const OPTIONS = { + schema: SCHEMA_STRING, + 
}; + + let clock: sinon.SinonFakeTimers; + let insertSpy: sinon.SinonSpy; + let requestStub: sinon.SinonStub; + + before(() => { + clock = sinon.useFakeTimers() as sinon.SinonFakeTimers; + }); + beforeEach(() => { - fakeUuid.v4 = () => { - return fakeInsertId; - }; + insertSpy = sinon.spy(table, '_insert'); + requestStub = sinon.stub(table, 'request').resolves([{}]); + sandbox.stub(fakeUuid, 'v4').returns(fakeInsertId); + }); + + afterEach(() => { + clock.reset(); + insertSpy.restore(); + }); + + after(() => { + clock.restore(); + }); + + /** + * Only use this method when NOT directly awaiting on `table.insert`, i.e. + * when relying on any of the fake timer async helpers. + * Tests should assert isRejected or isFulfilled. + * @ignore + * @param fn + * @returns {Promise<pReflect.PromiseResult<T>>} + */ + async function reflectAfterTimer<T>(fn: () => Promise<T>) { + // When `fn` rejects/throws, we need to capture this and test + // for it as needed. Using reflection avoids try/catch's potential for + // false-positives. + // Also, defer capturing the settled promise until _after_ the + // internal timer (delay) has been completed. + + const fnPromise: Promise<T> = fn(); + const reflectedPromise = pReflect(fnPromise); + await clock.runAllAsync(); + return reflectedPromise; + } + + it('should throw an error if rows is empty', async () => { + await assert.rejects( + async () => table.insert([]), + /You must provide at least 1 row to be inserted/ + ); }); - it('should throw an error if rows is empty', () => { - assert.throws(() => { - table.insert([]); - }, /You must provide at least 1 row to be inserted\./); + it('should save data', async () => { + await table.insert(data); + assert( + requestStub.calledOnceWithExactly({ + method: 'POST', + uri: '/insertAll', + json: dataApiFormat, + }) + ); }); - it('should save data', done => { - table.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.method, 'POST'); - assert.strictEqual(reqOpts.uri, '/insertAll'); - assert.deepStrictEqual(reqOpts.json, dataApiFormat); - done(); - }; + it('should return a promise if no callback is provided', () => { + const promise = table.insert(data); + assert(promise instanceof Promise); + }); - table.insert(data, done); + it('should resolve to an array on success', async () => { + const resp = await table.insert(data); + assert(Array.isArray(resp)); }); - it('should generate insertId', done => { - table.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.json.rows[0].insertId, fakeInsertId); - done(); - }; + it('should generate insertId', async () => { + await table.insert([data[0]]); + assert( + requestStub.calledOnceWith( + sinon.match.hasNested('json.rows[0].insertId', fakeInsertId) + ) + ); + }); - table.insert([data[0]], done); + it('should omit the insertId if createInsertId is false', async () => { + await table.insert([data[0]], {createInsertId: false}); + assert(requestStub.calledOnce); + assert( + requestStub.calledWithMatch( + ({json}: DecorateRequestOptions) => + json.rows[0].insertId === undefined && + json.createInsertId === undefined + ) + ); }); it('should execute callback with API response', done => { const apiResponse = {insertErrors: []}; - - table.request = (reqOpts: JobOptions, callback: Function) => { - callback(null, apiResponse); - }; + requestStub.resolves([apiResponse]); table.insert(data, (err: Error, apiResponse_: {}) => { assert.ifError(err); @@ -1968,225 +2318,346 @@ describe('BigQuery/Table', () => { it('should execute callback with error & API response', done => { const
error = new Error('Error.'); - const apiResponse = {}; - - table.request = (reqOpts: JobOptions, callback: Function) => { - callback(error, apiResponse); - }; + requestStub.rejects(error); table.insert(data, (err: Error, apiResponse_: {}) => { assert.strictEqual(err, error); - assert.strictEqual(apiResponse_, apiResponse); + assert.strictEqual(apiResponse_, null); done(); }); }); - it('should return partial failures', done => { + it('should reject with API error', async () => { + const error = new Error('Error.'); + requestStub.rejects(error); + await assert.rejects(async () => table.insert(data), error); + }); + + it('should return partial failures', async () => { const row0Error = {message: 'Error.', reason: 'notFound'}; const row1Error = {message: 'Error.', reason: 'notFound'}; - - table.request = (reqOpts: JobOptions, callback: Function) => { - callback(null, { + requestStub.resolves([ + { insertErrors: [ {index: 0, errors: [row0Error]}, {index: 1, errors: [row1Error]}, ], - }); - }; + }, + ]); - table.insert(data, (err: Error) => { - assert.strictEqual(err.name, 'PartialFailureError'); + const reflection = await reflectAfterTimer(() => table.insert(data)); + assert(reflection.isRejected); + const {reason} = reflection; + assert.deepStrictEqual((reason as GoogleErrorBody).errors, [ + { + row: dataApiFormat.rows[0].json, + errors: [row0Error], + }, + { + row: dataApiFormat.rows[1].json, + errors: [row1Error], + }, + ]); + }); - assert.deepStrictEqual((err as {} as GoogleErrorBody).errors, [ - { - row: dataApiFormat.rows[0].json, - errors: [row0Error], - }, - { - row: dataApiFormat.rows[1].json, - errors: [row1Error], - }, - ]); + it('should retry partials default max 3', async () => { + const rowError = {message: 'Error.', reason: 'try again plz'}; + requestStub.resetBehavior(); + requestStub.resolves([ + { + insertErrors: [ + {index: 0, errors: [rowError]}, + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + {index: 3, errors: [rowError]}, + ], + }, + ]); - done(); - }); + const reflection = await reflectAfterTimer(() => + table.insert(data, OPTIONS) + ); + assert(reflection.isRejected); + assert.strictEqual(insertSpy.callCount, 4); }); - it('should insert raw data', done => { - table.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.method, 'POST'); - assert.strictEqual(reqOpts.uri, '/insertAll'); - assert.deepStrictEqual(reqOpts.json, {rows: rawData}); - assert.strictEqual(reqOpts.json.raw, undefined); - done(); - }; + it('should retry partials with optional max', async () => { + const partialRetries = 6; + const rowError = {message: 'Error.', reason: 'try again plz'}; + requestStub.resetBehavior(); + requestStub.resolves([ + { + insertErrors: [ + {index: 0, errors: [rowError]}, + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + {index: 3, errors: [rowError]}, + ], + }, + ]); + + const reflection = await reflectAfterTimer(() => + table.insert(data, {...OPTIONS, partialRetries}) + ); + assert(reflection.isRejected); + assert.strictEqual(insertSpy.callCount, partialRetries + 1); + }); + + it('should allow 0 partial retries, but still do it once', async () => { + const rowError = {message: 'Error.', reason: 'try again plz'}; + requestStub.resetBehavior(); + requestStub.resolves([ + { + insertErrors: [ + {index: 0, errors: [rowError]}, + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + {index: 3, errors: [rowError]}, + ], + }, + ]); + + const reflection = await reflectAfterTimer(() => + table.insert(data, 
{...OPTIONS, partialRetries: 0}) + ); + assert(reflection.isRejected); + assert.strictEqual(insertSpy.callCount, 1); + }); + + it('should keep partial retries option non-negative', async () => { + const rowError = {message: 'Error.', reason: 'try again plz'}; + requestStub.resetBehavior(); + requestStub.resolves([ + { + insertErrors: [ + {index: 0, errors: [rowError]}, + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + {index: 3, errors: [rowError]}, + ], + }, + ]); + + const reflection = await reflectAfterTimer(() => + table.insert(data, {...OPTIONS, partialRetries: -1}) + ); + assert(reflection.isRejected); + assert.strictEqual(insertSpy.callCount, 1); + }); + + it('should retry partial inserts deltas', async () => { + const rowError = {message: 'Error.', reason: 'try again plz'}; + requestStub.resetBehavior(); + requestStub.onCall(0).resolves([ + { + insertErrors: [ + {index: 0, errors: [rowError]}, + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + {index: 3, errors: [rowError]}, + ], + }, + ]); + + requestStub.onCall(1).resolves([ + { + insertErrors: [ + {index: 0, errors: [rowError]}, + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + ], + }, + ]); + + requestStub.onCall(2).resolves([ + { + insertErrors: [ + {index: 1, errors: [rowError]}, + {index: 2, errors: [rowError]}, + ], + }, + ]); + + const goodResponse = [{foo: 'bar'}]; + requestStub.onCall(3).resolves(goodResponse); + const reflection = await reflectAfterTimer(() => + table.insert(data, OPTIONS) + ); + assert(reflection.isFulfilled); + + assert.deepStrictEqual( + requestStub.getCall(0).args[0].json, + dataApiFormat, + 'first call: try all 5' + ); + assert.deepStrictEqual( + requestStub.getCall(1).args[0].json, + {rows: dataApiFormat.rows.slice(0, 4)}, + 'second call: previous failures were 4/5' + ); + assert.deepStrictEqual( + requestStub.getCall(2).args[0].json, + {rows: dataApiFormat.rows.slice(0, 3)}, + 'third call: previous failures were 3/5' + ); + assert.deepStrictEqual( + requestStub.getCall(3).args[0].json, + {rows: dataApiFormat.rows.slice(1, 3)}, + 'fourth call: previous failures were 2/5' + ); + assert(!requestStub.getCall(4), 'fifth call: should not have happened'); + assert.ok(reflection.value); + }); + + it('should insert raw data', async () => { const opts = {raw: true}; - table.insert(rawData, opts, done); + await table.insert(rawData, opts); + assert(requestStub.calledOnce); + + const [reqOpts]: DecorateRequestOptions[] = requestStub.firstCall.args; + assert.strictEqual(reqOpts.method, 'POST'); + assert.strictEqual(reqOpts.uri, '/insertAll'); + assert.deepStrictEqual(reqOpts.json, {rows: rawData}); }); - it('should accept options', done => { + it('should accept options', async () => { const opts = { ignoreUnknownValues: true, skipInvalidRows: true, templateSuffix: 'test', }; - table.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.method, 'POST'); - assert.strictEqual(reqOpts.uri, '/insertAll'); + await table.insert(data, opts); + assert(requestStub.calledOnce); - assert.strictEqual( - reqOpts.json.ignoreUnknownValues, opts.ignoreUnknownValues); - assert.strictEqual(reqOpts.json.skipInvalidRows, opts.skipInvalidRows); - assert.strictEqual(reqOpts.json.templateSuffix, opts.templateSuffix); + const [reqOpts]: DecorateRequestOptions[] = requestStub.firstCall.args; + assert.strictEqual(reqOpts.method, 'POST'); + assert.strictEqual(reqOpts.uri, '/insertAll'); - assert.deepStrictEqual(reqOpts.json.rows, dataApiFormat.rows); - 
done(); - }; + assert.strictEqual( + reqOpts.json.ignoreUnknownValues, + opts.ignoreUnknownValues + ); + assert.strictEqual(reqOpts.json.skipInvalidRows, opts.skipInvalidRows); + assert.strictEqual(reqOpts.json.templateSuffix, opts.templateSuffix); - table.insert(data, opts, done); + assert.deepStrictEqual(reqOpts.json.rows, dataApiFormat.rows); }); describe('create table and retry', () => { - const OPTIONS = { - autoCreate: true, - schema: SCHEMA_STRING, - }; - - // tslint:disable-next-line no-any - let _setTimeout: any; - // tslint:disable-next-line no-any - let _random: any; - - before(() => { - _setTimeout = global.setTimeout; - _random = Math.random; - }); + let createStub: sinon.SinonStub; + let insertCreateSpy: sinon.SinonSpy; beforeEach(() => { - sandbox.stub(global, 'setTimeout').callsFake(cb => { - cb(); - return {} as NodeJS.Timeout; - }); - Math.random = _random; - table.request = (reqOpts: JobOptions, callback: Function) => { - callback({code: 404}); - }; - table.create = (reqOpts: JobOptions, callback: Function) => { - callback(null); - }; + insertCreateSpy = sinon.spy(table, '_insertAndCreateTable'); + createStub = sinon.stub(table, 'create').resolves([{}]); + requestStub.onFirstCall().rejects({code: 404}); }); - after(() => { - global.setTimeout = _setTimeout; - Math.random = _random; + afterEach(() => { + insertCreateSpy.restore(); + createStub.restore(); }); - it('should throw if autoCreate is set with no schema', () => { - const options = { - autoCreate: true, - }; + it('should not include the schema in the insert request', async () => { + requestStub.reset(); + requestStub.resolves([{}]); - assert.throws(() => { - table.insert(data, options); - }, /Schema must be provided in order to auto-create Table\./); + await table.insert(data, OPTIONS); + assert(requestStub.calledOnce); + assert.strictEqual( + requestStub.firstCall.lastArg.json.schema, + undefined + ); }); - it('should not include the schema in the insert request', done => { - table.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.json.schema, undefined); - assert.strictEqual(reqOpts.json.autoCreate, undefined); - done(); - }; - - table.insert(data, OPTIONS, assert.ifError); + it('should attempt to create table if not created', async () => { + const reflection = await reflectAfterTimer(() => + table.insert(data, OPTIONS) + ); + assert(reflection.isFulfilled); + assert(createStub.calledOnce); + assert.strictEqual(createStub.firstCall.lastArg.schema, SCHEMA_STRING); }); - it('should set a timeout to create the table', done => { - const fakeRandomValue = Math.random(); - - Math.random = () => { - return fakeRandomValue; - }; - - sandbox.restore(); - sandbox.stub(global, 'setTimeout').callsFake((callback, delay) => { - assert.strictEqual(delay, fakeRandomValue * 60000); - callback(); - return {} as NodeJS.Timeout; - }); - - table.create = (reqOpts: JobOptions) => { - assert.strictEqual(reqOpts.schema, SCHEMA_STRING); - done(); - }; - - table.insert(data, OPTIONS, assert.ifError); + it('should set a timeout to insert rows in the created table', async () => { + // the implementation uses an explicit 60s delay + // so this tests at various intervals + const expectedDelay = 60000; + const firstCheckDelay = 50000; + const remainingCheckDelay = expectedDelay - firstCheckDelay; + + pReflect(table.insert(data, OPTIONS)); // gracefully handle async errors + assert(insertCreateSpy.calledOnce); // just called `insert`, that's 1 so far + + await clock.tickAsync(firstCheckDelay); // first 50s + 
assert(insertCreateSpy.calledOnce); + assert(createStub.calledOnce, 'must create table before inserting'); + + await clock.tickAsync(remainingCheckDelay); // first 50s + 10s = 60s + assert(insertCreateSpy.calledTwice); + assert.strictEqual(insertCreateSpy.secondCall.args[0], data); + assert.strictEqual(insertCreateSpy.secondCall.args[1], OPTIONS); + + await clock.runAllAsync(); // for good measure + assert( + insertCreateSpy.calledTwice, + 'should not have called insert again' + ); }); - it('should return table creation errors', done => { + it('should reject on table creation errors', async () => { const error = new Error('err.'); - const response = {}; - - table.create = (reqOpts: JobOptions, callback: Function) => { - callback(error, null, response); - }; + createStub.rejects(error); - table.insert(data, OPTIONS, (err: Error, resp: {}) => { - assert.strictEqual(err, error); - assert.strictEqual(resp, response); - done(); - }); + const reflection = await reflectAfterTimer(() => + table.insert(data, OPTIONS) + ); + assert(reflection.isRejected); + assert.strictEqual(reflection.reason, error); }); - it('should ignore 409 errors', done => { - table.create = (reqOpts: JobOptions, callback: Function) => { - callback({code: 409}); - }; - - let timeouts = 0; - sandbox.restore(); - sandbox.stub(global, 'setTimeout').callsFake((callback, delay) => { - if (++timeouts === 2) { - assert.strictEqual(delay, 60000); - done(); - } - callback(null); - return {} as NodeJS.Timeout; - }); - - table.insert(data, OPTIONS, assert.ifError); + it('should ignore 409 errors', async () => { + createStub.rejects({code: 409}); + + const reflection = await reflectAfterTimer(() => + table.insert(data, OPTIONS) + ); + assert(reflection.isFulfilled); + assert(createStub.calledOnce); + assert(insertCreateSpy.calledTwice); + assert.strictEqual(insertCreateSpy.secondCall.args[0], data); + assert.strictEqual(insertCreateSpy.secondCall.args[1], OPTIONS); }); - it('should retry the insert', done => { - const response = {}; - let attempts = 0; - - table.request = (reqOpts: JobOptions, callback: Function) => { - assert.strictEqual(reqOpts.method, 'POST'); - assert.strictEqual(reqOpts.uri, '/insertAll'); - assert.deepStrictEqual(reqOpts.json, dataApiFormat); - - if (++attempts === 2) { - callback(null, response); - return; - } - - callback({code: 404}); - }; - - table.insert(data, OPTIONS, (err: Error, resp: {}) => { - assert.ifError(err); - assert.strictEqual(resp, response); - done(); - }); + it('should retry the insert', async () => { + const errorResponse = {code: 404}; + requestStub.onFirstCall().rejects(errorResponse); + requestStub.onSecondCall().rejects(errorResponse); + + const goodResponse = [{foo: 'bar'}]; + requestStub.onThirdCall().resolves(goodResponse); + + const reflection = await reflectAfterTimer(() => + table.insert(data, OPTIONS) + ); + assert(reflection.isFulfilled); + assert(requestStub.calledThrice); + assert( + requestStub.alwaysCalledWithMatch({ + method: 'POST', + uri: '/insertAll', + json: dataApiFormat, + }) + ); + assert.deepStrictEqual(reflection.value, goodResponse); }); }); }); describe('load', () => { - // tslint:disable-next-line no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any let fakeJob: any; beforeEach(() => { @@ -2278,17 +2749,194 @@ describe('BigQuery/Table', () => { assert.strictEqual(data, fakeMetadata); return formattedMetadata; }; - - // tslint:disable-next-line:no-any + // eslint-disable-next-line @typescript-eslint/no-explicit-any (FakeServiceObject.prototype as 
any).setMetadata = function( - metadata: {}, callback: Function) { + metadata: {}, + callback: Function + ) { assert.strictEqual(this, table); assert.strictEqual(metadata, formattedMetadata); assert.strictEqual(callback, done); - callback!(null); // the done fn + callback!(null); // the done fn }; table.setMetadata(fakeMetadata, done); }); }); + + describe('setIamPolicy', () => { + const BIGQUERY_DATA_VIEWER = 'roles/bigquery.dataViewer'; + + it('should make correct API request', done => { + const binding = {role: BIGQUERY_DATA_VIEWER, members: ['Turing']}; + const policy = {bindings: [binding], etag: 'abc'}; + + table.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.json.policy, policy); + assert.strictEqual(reqOpts.uri, '/:setIamPolicy'); + assert.strictEqual(reqOpts.method, 'POST'); + done(); + }; + + table.setIamPolicy(policy); + }); + + it('should accept a callback', () => { + const binding = {role: BIGQUERY_DATA_VIEWER, members: ['Turing']}; + const policy = {bindings: [binding], etag: 'abc'}; + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(null, policy); + }; + + table.setIamPolicy(policy, (err: Error, resp: {}) => { + assert.ifError(err); + assert.strictEqual(resp, policy); + }); + }); + + it('should accept options', done => { + const policy = {}; + const updateMask = 'binding'; + + table.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.json.policy, policy); + assert.strictEqual(reqOpts.json.updateMask, updateMask); + assert.strictEqual(reqOpts.uri, '/:setIamPolicy'); + assert.strictEqual(reqOpts.method, 'POST'); + done(); + }; + + table.setIamPolicy(policy, {updateMask}); + }); + + it('should throw with invalid policy version', () => { + const policy = {version: 100}; + assert.throws(() => { + table.setIamPolicy(policy, util.noop); + }, /Only IAM policy version 1 is supported./); + }); + + it('should return errors', () => { + const policy = {}; + const error = new Error('a bad thing!'); + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(error, null); + }; + + table.setIamPolicy(policy, (err: Error, resp: {}) => { + assert.strictEqual(err, error); + assert.strictEqual(resp, null); + }); + }); + }); + + describe('getIamPolicy', () => { + it('should make correct API call', done => { + table.request = (reqOpts: DecorateRequestOptions) => { + assert.strictEqual(reqOpts.uri, '/:getIamPolicy'); + assert.strictEqual(reqOpts.method, 'POST'); + done(); + }; + + table.getIamPolicy(); + }); + + it('should accept just a callback', () => { + const policy = {}; + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + assert.strictEqual(reqOpts.uri, '/:getIamPolicy'); + assert.strictEqual(reqOpts.method, 'POST'); + callback(null, policy); + }; + + table.getIamPolicy((err: Error, resp: {}) => { + assert.ifError(err); + assert.strictEqual(resp, policy); + }); + }); + + it('should accept options', () => { + const policy = {}; + const options = {requestedPolicyVersion: 1}; + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + assert.deepStrictEqual(reqOpts.json.options, options); + assert.strictEqual(reqOpts.uri, '/:getIamPolicy'); + assert.strictEqual(reqOpts.method, 'POST'); + callback(null, policy); + }; + + table.getIamPolicy(options, (err: Error, resp: {}) => { + assert.ifError(err); + assert.strictEqual(resp, policy); + }); + }); + + it('should throw with invalid policy version', () => { + const 
options = {requestedPolicyVersion: 100}; + assert.throws(() => { + table.getIamPolicy(options, util.noop); + }, /Only IAM policy version 1 is supported./); + }); + + it('should return errors', () => { + const error = new Error('a bad thing!'); + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(error, null); + }; + + table.getIamPolicy((err: Error, resp: {}) => { + assert.strictEqual(err, error); + assert.strictEqual(resp, null); + }); + }); + }); + + describe('testIamPermissions', () => { + it('should make correct API call', () => { + const permissions = ['bigquery.do.stuff']; + + table.request = (reqOpts: DecorateRequestOptions) => { + assert.strictEqual(reqOpts.uri, '/:testIamPermissions'); + assert.strictEqual(reqOpts.method, 'POST'); + assert.deepStrictEqual(reqOpts.json, {permissions}); + }; + + table.testIamPermissions(permissions, util.noop); + }); + + it('should accept a callback', () => { + const permissions = ['bigquery.do.stuff']; + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + assert.deepStrictEqual(reqOpts.json.permissions, permissions); + assert.strictEqual(reqOpts.uri, '/:testIamPermissions'); + assert.strictEqual(reqOpts.method, 'POST'); + callback(null, {permissions}); + }; + + table.testIamPermissions(permissions, (err: Error, resp: {}) => { + assert.ifError(err); + assert.deepStrictEqual(resp, {permissions}); + }); + }); + + it('should return errors', () => { + const permissions = ['bigquery.do.stuff']; + const error = new Error('a bad thing!'); + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + callback(error, null); + }; + + table.testIamPermissions(permissions, (err: Error, resp: {}) => { + assert.strictEqual(err, error); + assert.strictEqual(resp, null); + }); + }); + }); }); diff --git a/tslint.json b/tslint.json deleted file mode 100644 index 617dc975..00000000 --- a/tslint.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "gts/tslint.json" -}
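Note on the test pattern above: the reworked insert tests drop the old stubs of global setTimeout and Math.random in favour of sinon fake timers plus p-reflect. The promise is reflected first, the fake clock is advanced second, and only then is the settled state asserted, so a rejection raised during a simulated delay cannot escape as an unhandled rejection. A stripped-down sketch of that pattern, outside the BigQuery suite (delayedOperation is an illustrative stand-in, not part of the library), might look like:

import * as assert from 'assert';
import * as sinon from 'sinon';
import pReflect from 'p-reflect';

// Stand-in for any promise-returning call that waits on an internal setTimeout.
function delayedOperation(): Promise<string> {
  return new Promise(resolve => setTimeout(() => resolve('done'), 60000));
}

it('settles once the fake clock is advanced', async () => {
  const clock = sinon.useFakeTimers();
  try {
    // Reflect before advancing the clock: the settled state is captured
    // without awaiting the raw promise, so nothing can reject un-handled.
    const reflected = pReflect(delayedOperation());
    await clock.runAllAsync();
    const result = await reflected;
    assert.strictEqual(result.isFulfilled, true);
    if (result.isFulfilled) {
      assert.strictEqual(result.value, 'done');
    }
  } finally {
    clock.restore();
  }
});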
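For reference, the behaviour these tests pin down is the promise-returning form of table.insert together with the partialRetries option for re-sending rows that come back in insertErrors (the tests above expect a default of three retries on top of the initial attempt). A minimal usage sketch, assuming default credentials and an existing dataset and table whose ids below are placeholders, could be:

import {BigQuery} from '@google-cloud/bigquery';

async function insertNames(): Promise<void> {
  const bigquery = new BigQuery();
  // 'my_dataset' and 'names' are placeholder ids for illustration only.
  const table = bigquery.dataset('my_dataset').table('names');

  const rows = [
    {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
  ];

  try {
    // Without a callback, insert() returns a promise that resolves with an
    // array containing the API response. partialRetries caps how many extra
    // attempts are made for rows reported in insertErrors.
    const [response] = await table.insert(rows, {partialRetries: 3});
    console.log('inserted', response);
  } catch (err) {
    // Partial failures surface as an error whose `errors` property pairs
    // each failed row with its per-row errors.
    console.error('insert failed', err);
  }
}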