From ee5d6ebb824b118db371a48f5f83474bd4dd466b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 23 Mar 2021 10:46:06 -0700 Subject: [PATCH 01/18] chore: update templates (#22) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/9d4e6069-5c18-4f79-97fb-99ebae377691/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/4679e7e415221f03ff2a71e3ffad75b9ec41d87e Source-Link: https://github.com/googleapis/synthtool/commit/33366574ffb9e11737b3547eb6f020ecae0536e8 Source-Link: https://github.com/googleapis/synthtool/commit/d1bb9173100f62c0cfc8f3138b62241e7f47ca6a Source-Link: https://github.com/googleapis/synthtool/commit/778d8beae28d6d87eb01fdc839a4b4d966ed2ebe Source-Link: https://github.com/googleapis/synthtool/commit/573f7655311b553a937f9123bee17bf78497db95 Source-Link: https://github.com/googleapis/synthtool/commit/ba960d730416fe05c50547e975ce79fcee52c671 Source-Link: https://github.com/googleapis/synthtool/commit/16ec872dd898d7de6e1822badfac32484b5d9031 Source-Link: https://github.com/googleapis/synthtool/commit/fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483 --- .github/header-checker-lint.yml | 15 +++++++++++++++ .kokoro/build.sh | 16 ++++++++++------ .kokoro/docs/docs-presubmit.cfg | 11 +++++++++++ .trampolinerc | 1 + CONTRIBUTING.rst | 22 ++++++++++++++++++---- MANIFEST.in | 4 ++-- noxfile.py | 12 ++++++++++++ synth.metadata | 7 ++++--- 8 files changed, 73 insertions(+), 15 deletions(-) create mode 100644 .github/header-checker-lint.yml diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 0000000..fc281c0 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 762e3c9..6c4a64d 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-notebooks +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-notebooks" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,16 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 1118107..bad28ff 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-notebooks/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.trampolinerc b/.trampolinerc index 995ee29..383b6ec 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a37c975..595c4ac 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d1..e783f4c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/noxfile.py b/noxfile.py index a57e24b..70d9c13 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -75,6 +86,7 @@ def default(session): session.install( "mock", "pytest", "pytest-cov", ) + session.install("-e", ".") # Run py.test against the unit tests. 
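The pieces changed above work together: `nox.options.sessions` defines what a bare `nox` invocation runs, the new `NOX_SESSION` environment variable lets a Kokoro job restrict the run to specific sessions (the docs presubmit sets it to `docs docfx`, which `build.sh` expands into `nox -s docs docfx`), and CONTRIBUTING.rst documents the `--` passthrough to pytest. A minimal usage sketch, assuming nox is installed and the session names match the noxfile above; the `-k` test name is only illustrative::

   # Run the default sessions listed in nox.options.sessions
   $ nox

   # Run only the documentation sessions, as the docs presubmit does via NOX_SESSION="docs docfx"
   $ nox -s docs docfx

   # Pass arguments through to pytest to run a single unit test (hypothetical test name)
   $ nox -s unit-3.8 -- -k test_list_instances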
diff --git a/synth.metadata b/synth.metadata index 9109ecc..596ea25 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-notebooks.git", - "sha": "7633ad32ba52781688961fa78aecb6fa0b5f4e3e" + "sha": "bf6d2d927132a8c49872bce622eba5b863157f3b" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" } } ], @@ -48,6 +48,7 @@ ".github/ISSUE_TEMPLATE/feature_request.md", ".github/ISSUE_TEMPLATE/support_request.md", ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", ".github/release-please.yml", ".github/snippet-bot.yml", ".gitignore", From aba12e032fe95668494094d84a16c2c65bd5f971 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Sat, 27 Mar 2021 15:06:02 -0600 Subject: [PATCH 02/18] chore: add constraints file (#15) Add constraints file to test lower bounds. These files will not be used until the noxfile is changed in googleapis/synthtool#869. --- testing/constraints-3.10.txt | 0 testing/constraints-3.11.txt | 0 testing/constraints-3.6.txt | 9 +++++++++ testing/constraints-3.7.txt | 0 testing/constraints-3.8.txt | 0 testing/constraints-3.9.txt | 0 6 files changed, 9 insertions(+) create mode 100644 testing/constraints-3.10.txt create mode 100644 testing/constraints-3.11.txt create mode 100644 testing/constraints-3.6.txt create mode 100644 testing/constraints-3.7.txt create mode 100644 testing/constraints-3.8.txt create mode 100644 testing/constraints-3.9.txt diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 0000000..a9e749b --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.0 +proto-plus==1.1.0 \ No newline at end of file diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 0000000..e69de29 From 4999922dc0f6eaebc8aec58929176ab6b87cfdca Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 31 Mar 2021 15:57:55 -0700 Subject: [PATCH 03/18] feat: add `from_service_account_info` (#26) --- .coveragerc | 18 + .gitignore | 4 +- .kokoro/build.sh | 10 + .kokoro/samples/python3.6/periodic-head.cfg | 11 + .kokoro/samples/python3.7/periodic-head.cfg | 11 + .kokoro/samples/python3.8/periodic-head.cfg | 11 + .kokoro/test-samples-against-head.sh | 28 + .kokoro/test-samples-impl.sh | 102 ++++ .kokoro/test-samples.sh | 96 +--- .pre-commit-config.yaml | 2 +- docs/notebooks_v1beta1/notebook_service.rst | 11 + docs/notebooks_v1beta1/services.rst | 6 +- docs/notebooks_v1beta1/types.rst | 1 + .../services/notebook_service/async_client.py | 201 ++++--- .../services/notebook_service/client.py | 210 +++---- .../services/notebook_service/pagers.py | 43 +- .../notebook_service/transports/base.py | 18 +- .../notebook_service/transports/grpc.py | 114 ++-- .../transports/grpc_asyncio.py | 122 ++-- .../cloud/notebooks_v1beta1/types/__init__.py | 64 +-- .../notebooks_v1beta1/types/environment.py | 6 +- .../cloud/notebooks_v1beta1/types/instance.py | 24 +- .../cloud/notebooks_v1beta1/types/service.py | 18 +- noxfile.py | 51 +- renovate.json | 3 +- setup.py | 2 +- synth.metadata | 101 +--- synth.py | 2 +- testing/constraints-3.6.txt | 4 +- .../unit/gapic/notebooks_v1beta1/__init__.py | 15 + .../test_notebook_service.py | 540 +++++++++++++++--- 31 files changed, 1162 insertions(+), 687 deletions(-) create mode 100644 .coveragerc create mode 100644 .kokoro/samples/python3.6/periodic-head.cfg create mode 100644 .kokoro/samples/python3.7/periodic-head.cfg create mode 100644 .kokoro/samples/python3.8/periodic-head.cfg create mode 100755 .kokoro/test-samples-against-head.sh create mode 100755 .kokoro/test-samples-impl.sh create mode 100644 docs/notebooks_v1beta1/notebook_service.rst diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..73a7e26 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +omit = + google/cloud/notebooks/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/.gitignore b/.gitignore index b9daa52..b4243ce 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. 
system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 6c4a64d..ab9b6e2 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation python3 -m pip install --upgrade --quiet nox python3 -m nox --version +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 0000000..f5e0618 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. 
+ +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-notebooks + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 0000000..cf5de74 --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index f05fb0a..1273a87 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-notebooks # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b1..32302e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/docs/notebooks_v1beta1/notebook_service.rst b/docs/notebooks_v1beta1/notebook_service.rst new file mode 100644 index 0000000..6f8cc21 --- /dev/null +++ b/docs/notebooks_v1beta1/notebook_service.rst @@ -0,0 +1,11 @@ +NotebookService +--------------------------------- + +.. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service.pagers + :members: + :inherited-members: diff --git a/docs/notebooks_v1beta1/services.rst b/docs/notebooks_v1beta1/services.rst index a88bad5..33b3547 100644 --- a/docs/notebooks_v1beta1/services.rst +++ b/docs/notebooks_v1beta1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Notebooks v1beta1 API =============================================== +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service - :members: - :inherited-members: + notebook_service diff --git a/docs/notebooks_v1beta1/types.rst b/docs/notebooks_v1beta1/types.rst index b981f31..7cdeaf4 100644 --- a/docs/notebooks_v1beta1/types.rst +++ b/docs/notebooks_v1beta1/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Notebooks v1beta1 API .. automodule:: google.cloud.notebooks_v1beta1.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py index 99fafe3..98eb73d 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py @@ -84,7 +84,36 @@ class NotebookServiceAsyncClient: NotebookServiceClient.parse_common_location_path ) - from_service_account_file = NotebookServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceAsyncClient: The constructed client. 
+ """ + return NotebookServiceClient.from_service_account_info.__func__(NotebookServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceAsyncClient: The constructed client. + """ + return NotebookServiceClient.from_service_account_file.__func__(NotebookServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -159,7 +188,7 @@ async def list_instances( r"""Lists instances in a given project and location. Args: - request (:class:`~.service.ListInstancesRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.ListInstancesRequest`): The request object. Request for listing notebook instances. @@ -170,7 +199,7 @@ async def list_instances( sent along with the request as metadata. Returns: - ~.pagers.ListInstancesAsyncPager: + google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListInstancesAsyncPager: Response for listing notebook instances. Iterating over this object will yield @@ -219,7 +248,7 @@ async def get_instance( r"""Gets details of a single Instance. Args: - request (:class:`~.service.GetInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.GetInstanceRequest`): The request object. Request for getting a notebook instance. @@ -230,7 +259,7 @@ async def get_instance( sent along with the request as metadata. Returns: - ~.instance.Instance: + google.cloud.notebooks_v1beta1.types.Instance: The definition of a notebook instance. @@ -271,7 +300,7 @@ async def create_instance( location. Args: - request (:class:`~.service.CreateInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.CreateInstanceRequest`): The request object. Request for creating a notebook instance. @@ -282,12 +311,12 @@ async def create_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -338,7 +367,7 @@ async def register_instance( API. Args: - request (:class:`~.service.RegisterInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.RegisterInstanceRequest`): The request object. Request for registering a notebook instance. @@ -349,12 +378,12 @@ async def register_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -400,7 +429,7 @@ async def set_instance_accelerator( r"""Updates the guest accelerators of a single Instance. 
Args: - request (:class:`~.service.SetInstanceAcceleratorRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceAcceleratorRequest`): The request object. Request for setting instance accelerator. @@ -411,12 +440,12 @@ async def set_instance_accelerator( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -462,7 +491,7 @@ async def set_instance_machine_type( r"""Updates the machine type of a single Instance. Args: - request (:class:`~.service.SetInstanceMachineTypeRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceMachineTypeRequest`): The request object. Request for setting instance machine type. @@ -473,12 +502,12 @@ async def set_instance_machine_type( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -524,7 +553,7 @@ async def set_instance_labels( r"""Updates the labels of an Instance. Args: - request (:class:`~.service.SetInstanceLabelsRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest`): The request object. Request for setting instance labels. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -534,12 +563,12 @@ async def set_instance_labels( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -585,7 +614,7 @@ async def delete_instance( r"""Deletes a single Instance. Args: - request (:class:`~.service.DeleteInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.DeleteInstanceRequest`): The request object. Request for deleting a notebook instance. @@ -596,24 +625,22 @@ async def delete_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -659,7 +686,7 @@ async def start_instance( r"""Starts a notebook instance. Args: - request (:class:`~.service.StartInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.StartInstanceRequest`): The request object. Request for starting a notebook instance @@ -670,12 +697,12 @@ async def start_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -721,7 +748,7 @@ async def stop_instance( r"""Stops a notebook instance. Args: - request (:class:`~.service.StopInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.StopInstanceRequest`): The request object. Request for stopping a notebook instance @@ -732,12 +759,12 @@ async def stop_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -783,7 +810,7 @@ async def reset_instance( r"""Resets a notebook instance. Args: - request (:class:`~.service.ResetInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.ResetInstanceRequest`): The request object. Request for reseting a notebook instance @@ -794,12 +821,12 @@ async def reset_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -849,7 +876,7 @@ async def report_instance_info( this method directly. Args: - request (:class:`~.service.ReportInstanceInfoRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest`): The request object. Request for notebook instances to report information to Notebooks API. @@ -860,12 +887,12 @@ async def report_instance_info( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. 
@@ -911,7 +938,7 @@ async def is_instance_upgradeable( r"""Check if a notebook instance is upgradable. Args: - request (:class:`~.service.IsInstanceUpgradeableRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest`): The request object. Request for checking if a notebook instance is upgradeable. @@ -922,7 +949,7 @@ async def is_instance_upgradeable( sent along with the request as metadata. Returns: - ~.service.IsInstanceUpgradeableResponse: + google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableResponse: Response for checking if a notebook instance is upgradeable. @@ -964,7 +991,7 @@ async def upgrade_instance( r"""Upgrades a notebook instance to the latest version. Args: - request (:class:`~.service.UpgradeInstanceRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest`): The request object. Request for upgrading a notebook instance @@ -975,12 +1002,12 @@ async def upgrade_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -1028,7 +1055,7 @@ async def upgrade_instance_internal( this method directly. Args: - request (:class:`~.service.UpgradeInstanceInternalRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest`): The request object. Request for upgrading a notebook instance from within the VM @@ -1039,12 +1066,12 @@ async def upgrade_instance_internal( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -1090,7 +1117,7 @@ async def list_environments( r"""Lists environments in a project. Args: - request (:class:`~.service.ListEnvironmentsRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest`): The request object. Request for listing environments. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1100,7 +1127,7 @@ async def list_environments( sent along with the request as metadata. Returns: - ~.pagers.ListEnvironmentsAsyncPager: + google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListEnvironmentsAsyncPager: Response for listing environments. Iterating over this object will yield results and resolve additional pages @@ -1148,7 +1175,7 @@ async def get_environment( r"""Gets details of a single Environment. Args: - request (:class:`~.service.GetEnvironmentRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.GetEnvironmentRequest`): The request object. Request for getting a notebook environment. @@ -1159,7 +1186,7 @@ async def get_environment( sent along with the request as metadata. Returns: - ~.environment.Environment: + google.cloud.notebooks_v1beta1.types.Environment: Definition of a software environment that is used to start a notebook instance. 
@@ -1200,7 +1227,7 @@ async def create_environment( r"""Creates a new Environment. Args: - request (:class:`~.service.CreateEnvironmentRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.CreateEnvironmentRequest`): The request object. Request for creating a notebook environment. @@ -1211,13 +1238,11 @@ async def create_environment( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.environment.Environment``: Definition of a - software environment that is used to start a notebook - instance. + The result type for the operation will be :class:`google.cloud.notebooks_v1beta1.types.Environment` Definition of a software environment that is used to start a notebook + instance. """ # Create or coerce a protobuf request object. @@ -1263,7 +1288,7 @@ async def delete_environment( r"""Deletes a single Environment. Args: - request (:class:`~.service.DeleteEnvironmentRequest`): + request (:class:`google.cloud.notebooks_v1beta1.types.DeleteEnvironmentRequest`): The request object. Request for deleting a notebook environment. @@ -1274,24 +1299,22 @@ async def delete_environment( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py index 059c910..97acda4 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py @@ -116,6 +116,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -128,7 +144,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + NotebookServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -248,10 +264,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.NotebookServiceTransport]): The + transport (Union[str, NotebookServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -287,21 +303,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -344,7 +356,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -360,7 +372,7 @@ def list_instances( r"""Lists instances in a given project and location. Args: - request (:class:`~.service.ListInstancesRequest`): + request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest): The request object. Request for listing notebook instances. @@ -371,7 +383,7 @@ def list_instances( sent along with the request as metadata. Returns: - ~.pagers.ListInstancesPager: + google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListInstancesPager: Response for listing notebook instances. Iterating over this object will yield @@ -421,7 +433,7 @@ def get_instance( r"""Gets details of a single Instance. Args: - request (:class:`~.service.GetInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.GetInstanceRequest): The request object. Request for getting a notebook instance. @@ -432,7 +444,7 @@ def get_instance( sent along with the request as metadata. 
Returns: - ~.instance.Instance: + google.cloud.notebooks_v1beta1.types.Instance: The definition of a notebook instance. @@ -474,7 +486,7 @@ def create_instance( location. Args: - request (:class:`~.service.CreateInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.CreateInstanceRequest): The request object. Request for creating a notebook instance. @@ -485,12 +497,12 @@ def create_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -542,7 +554,7 @@ def register_instance( API. Args: - request (:class:`~.service.RegisterInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.RegisterInstanceRequest): The request object. Request for registering a notebook instance. @@ -553,12 +565,12 @@ def register_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -605,7 +617,7 @@ def set_instance_accelerator( r"""Updates the guest accelerators of a single Instance. Args: - request (:class:`~.service.SetInstanceAcceleratorRequest`): + request (google.cloud.notebooks_v1beta1.types.SetInstanceAcceleratorRequest): The request object. Request for setting instance accelerator. @@ -616,12 +628,12 @@ def set_instance_accelerator( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -668,7 +680,7 @@ def set_instance_machine_type( r"""Updates the machine type of a single Instance. Args: - request (:class:`~.service.SetInstanceMachineTypeRequest`): + request (google.cloud.notebooks_v1beta1.types.SetInstanceMachineTypeRequest): The request object. Request for setting instance machine type. @@ -679,12 +691,12 @@ def set_instance_machine_type( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -733,7 +745,7 @@ def set_instance_labels( r"""Updates the labels of an Instance. Args: - request (:class:`~.service.SetInstanceLabelsRequest`): + request (google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest): The request object. Request for setting instance labels. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -743,12 +755,12 @@ def set_instance_labels( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -795,7 +807,7 @@ def delete_instance( r"""Deletes a single Instance. Args: - request (:class:`~.service.DeleteInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.DeleteInstanceRequest): The request object. Request for deleting a notebook instance. @@ -806,24 +818,22 @@ def delete_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -870,7 +880,7 @@ def start_instance( r"""Starts a notebook instance. Args: - request (:class:`~.service.StartInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.StartInstanceRequest): The request object. Request for starting a notebook instance @@ -881,12 +891,12 @@ def start_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -933,7 +943,7 @@ def stop_instance( r"""Stops a notebook instance. Args: - request (:class:`~.service.StopInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.StopInstanceRequest): The request object. Request for stopping a notebook instance @@ -944,12 +954,12 @@ def stop_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -996,7 +1006,7 @@ def reset_instance( r"""Resets a notebook instance. 
Args: - request (:class:`~.service.ResetInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.ResetInstanceRequest): The request object. Request for reseting a notebook instance @@ -1007,12 +1017,12 @@ def reset_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -1063,7 +1073,7 @@ def report_instance_info( this method directly. Args: - request (:class:`~.service.ReportInstanceInfoRequest`): + request (google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest): The request object. Request for notebook instances to report information to Notebooks API. @@ -1074,12 +1084,12 @@ def report_instance_info( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -1126,7 +1136,7 @@ def is_instance_upgradeable( r"""Check if a notebook instance is upgradable. Args: - request (:class:`~.service.IsInstanceUpgradeableRequest`): + request (google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest): The request object. Request for checking if a notebook instance is upgradeable. @@ -1137,7 +1147,7 @@ def is_instance_upgradeable( sent along with the request as metadata. Returns: - ~.service.IsInstanceUpgradeableResponse: + google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableResponse: Response for checking if a notebook instance is upgradeable. @@ -1180,7 +1190,7 @@ def upgrade_instance( r"""Upgrades a notebook instance to the latest version. Args: - request (:class:`~.service.UpgradeInstanceRequest`): + request (google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest): The request object. Request for upgrading a notebook instance @@ -1191,12 +1201,12 @@ def upgrade_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. """ # Create or coerce a protobuf request object. @@ -1245,7 +1255,7 @@ def upgrade_instance_internal( this method directly. Args: - request (:class:`~.service.UpgradeInstanceInternalRequest`): + request (google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest): The request object. Request for upgrading a notebook instance from within the VM @@ -1256,12 +1266,12 @@ def upgrade_instance_internal( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.instance.Instance``: The definition of a - notebook instance. + :class:`google.cloud.notebooks_v1beta1.types.Instance` + The definition of a notebook instance. 
""" # Create or coerce a protobuf request object. @@ -1310,7 +1320,7 @@ def list_environments( r"""Lists environments in a project. Args: - request (:class:`~.service.ListEnvironmentsRequest`): + request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest): The request object. Request for listing environments. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1320,7 +1330,7 @@ def list_environments( sent along with the request as metadata. Returns: - ~.pagers.ListEnvironmentsPager: + google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListEnvironmentsPager: Response for listing environments. Iterating over this object will yield results and resolve additional pages @@ -1369,7 +1379,7 @@ def get_environment( r"""Gets details of a single Environment. Args: - request (:class:`~.service.GetEnvironmentRequest`): + request (google.cloud.notebooks_v1beta1.types.GetEnvironmentRequest): The request object. Request for getting a notebook environment. @@ -1380,7 +1390,7 @@ def get_environment( sent along with the request as metadata. Returns: - ~.environment.Environment: + google.cloud.notebooks_v1beta1.types.Environment: Definition of a software environment that is used to start a notebook instance. @@ -1422,7 +1432,7 @@ def create_environment( r"""Creates a new Environment. Args: - request (:class:`~.service.CreateEnvironmentRequest`): + request (google.cloud.notebooks_v1beta1.types.CreateEnvironmentRequest): The request object. Request for creating a notebook environment. @@ -1433,13 +1443,11 @@ def create_environment( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.environment.Environment``: Definition of a - software environment that is used to start a notebook - instance. + The result type for the operation will be :class:`google.cloud.notebooks_v1beta1.types.Environment` Definition of a software environment that is used to start a notebook + instance. """ # Create or coerce a protobuf request object. @@ -1486,7 +1494,7 @@ def delete_environment( r"""Deletes a single Environment. Args: - request (:class:`~.service.DeleteEnvironmentRequest`): + request (google.cloud.notebooks_v1beta1.types.DeleteEnvironmentRequest): The request object. Request for deleting a notebook environment. @@ -1497,24 +1505,22 @@ def delete_environment( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. 
""" # Create or coerce a protobuf request object. diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py b/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py index 15c54f6..ec305f6 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance @@ -26,7 +35,7 @@ class ListInstancesPager: """A pager for iterating through ``list_instances`` requests. This class thinly wraps an initial - :class:`~.service.ListInstancesResponse` object, and + :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse` object, and provides an ``__iter__`` method to iterate through its ``instances`` field. @@ -35,7 +44,7 @@ class ListInstancesPager: through the ``instances`` field on the corresponding responses. - All the usual :class:`~.service.ListInstancesResponse` + All the usual :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -53,9 +62,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.service.ListInstancesRequest`): + request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest): The initial request object. - response (:class:`~.service.ListInstancesResponse`): + response (google.cloud.notebooks_v1beta1.types.ListInstancesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -88,7 +97,7 @@ class ListInstancesAsyncPager: """A pager for iterating through ``list_instances`` requests. This class thinly wraps an initial - :class:`~.service.ListInstancesResponse` object, and + :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse` object, and provides an ``__aiter__`` method to iterate through its ``instances`` field. @@ -97,7 +106,7 @@ class ListInstancesAsyncPager: through the ``instances`` field on the corresponding responses. - All the usual :class:`~.service.ListInstancesResponse` + All the usual :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -115,9 +124,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.service.ListInstancesRequest`): + request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest): The initial request object. - response (:class:`~.service.ListInstancesResponse`): + response (google.cloud.notebooks_v1beta1.types.ListInstancesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -154,7 +163,7 @@ class ListEnvironmentsPager: """A pager for iterating through ``list_environments`` requests. 
This class thinly wraps an initial - :class:`~.service.ListEnvironmentsResponse` object, and + :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse` object, and provides an ``__iter__`` method to iterate through its ``environments`` field. @@ -163,7 +172,7 @@ class ListEnvironmentsPager: through the ``environments`` field on the corresponding responses. - All the usual :class:`~.service.ListEnvironmentsResponse` + All the usual :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -181,9 +190,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.service.ListEnvironmentsRequest`): + request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest): The initial request object. - response (:class:`~.service.ListEnvironmentsResponse`): + response (google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -216,7 +225,7 @@ class ListEnvironmentsAsyncPager: """A pager for iterating through ``list_environments`` requests. This class thinly wraps an initial - :class:`~.service.ListEnvironmentsResponse` object, and + :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse` object, and provides an ``__aiter__`` method to iterate through its ``environments`` field. @@ -225,7 +234,7 @@ class ListEnvironmentsAsyncPager: through the ``environments`` field on the corresponding responses. - All the usual :class:`~.service.ListEnvironmentsResponse` + All the usual :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -243,9 +252,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.service.ListEnvironmentsRequest`): + request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest): The initial request object. - response (:class:`~.service.ListEnvironmentsResponse`): + response (google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py index 240525b..d72ee7b 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py @@ -71,10 +71,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
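For the pager classes documented above, iteration is the whole interface: the pager issues the initial List* call and transparently requests follow-up pages while you loop. A rough sketch, with a placeholder parent value:

    from google.cloud import notebooks_v1beta1

    client = notebooks_v1beta1.NotebookServiceClient()
    pager = client.list_instances(
        request={"parent": "projects/my-project/locations/us-central1-a"}
    )

    # Each iteration may trigger an additional ListInstancesRequest behind the scenes;
    # only the most recent response is retained for attribute lookup.
    for instance in pager:
        print(instance.name)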
+ Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -82,6 +82,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -91,20 +94,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py index 9b30743..5aed906 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -91,6 +92,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -105,72 +110,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -178,18 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -203,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py index a68037d..9cf81f5 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py @@ -65,7 +65,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -105,6 +105,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -136,12 +137,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -150,72 +155,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -223,18 +217,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/notebooks_v1beta1/types/__init__.py b/google/cloud/notebooks_v1beta1/types/__init__.py index 853a88d..6b8325a 100644 --- a/google/cloud/notebooks_v1beta1/types/__init__.py +++ b/google/cloud/notebooks_v1beta1/types/__init__.py @@ -16,63 +16,63 @@ # from .environment import ( + ContainerImage, Environment, VmImage, - ContainerImage, ) from .instance import Instance from .service import ( - OperationMetadata, + CreateEnvironmentRequest, + CreateInstanceRequest, + DeleteEnvironmentRequest, + DeleteInstanceRequest, + GetEnvironmentRequest, + GetInstanceRequest, + IsInstanceUpgradeableRequest, + IsInstanceUpgradeableResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, ListInstancesRequest, ListInstancesResponse, - GetInstanceRequest, - CreateInstanceRequest, + OperationMetadata, RegisterInstanceRequest, + ReportInstanceInfoRequest, + ResetInstanceRequest, SetInstanceAcceleratorRequest, - SetInstanceMachineTypeRequest, SetInstanceLabelsRequest, - DeleteInstanceRequest, + SetInstanceMachineTypeRequest, StartInstanceRequest, StopInstanceRequest, - ResetInstanceRequest, - ReportInstanceInfoRequest, - IsInstanceUpgradeableRequest, - IsInstanceUpgradeableResponse, - UpgradeInstanceRequest, UpgradeInstanceInternalRequest, - ListEnvironmentsRequest, - ListEnvironmentsResponse, - GetEnvironmentRequest, - CreateEnvironmentRequest, - DeleteEnvironmentRequest, + UpgradeInstanceRequest, ) __all__ = ( + "ContainerImage", "Environment", "VmImage", - "ContainerImage", "Instance", - "OperationMetadata", + "CreateEnvironmentRequest", + "CreateInstanceRequest", + "DeleteEnvironmentRequest", + "DeleteInstanceRequest", + "GetEnvironmentRequest", + "GetInstanceRequest", + "IsInstanceUpgradeableRequest", + "IsInstanceUpgradeableResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", "ListInstancesRequest", "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", + "OperationMetadata", "RegisterInstanceRequest", + "ReportInstanceInfoRequest", + "ResetInstanceRequest", "SetInstanceAcceleratorRequest", - "SetInstanceMachineTypeRequest", "SetInstanceLabelsRequest", - "DeleteInstanceRequest", + "SetInstanceMachineTypeRequest", "StartInstanceRequest", "StopInstanceRequest", - "ResetInstanceRequest", - "ReportInstanceInfoRequest", - "IsInstanceUpgradeableRequest", - "IsInstanceUpgradeableResponse", - "UpgradeInstanceRequest", "UpgradeInstanceInternalRequest", - "ListEnvironmentsRequest", - "ListEnvironmentsResponse", - "GetEnvironmentRequest", - "CreateEnvironmentRequest", - "DeleteEnvironmentRequest", + "UpgradeInstanceRequest", ) diff --git a/google/cloud/notebooks_v1beta1/types/environment.py b/google/cloud/notebooks_v1beta1/types/environment.py index 5f13677..4e02c7e 100644 --- a/google/cloud/notebooks_v1beta1/types/environment.py +++ b/google/cloud/notebooks_v1beta1/types/environment.py @@ -39,10 +39,10 @@ class Environment(proto.Message): Display name of this environment for the UI. description (str): A brief description of this environment. - vm_image (~.environment.VmImage): + vm_image (google.cloud.notebooks_v1beta1.types.VmImage): Use a Compute Engine VM image to start the notebook instance. - container_image (~.environment.ContainerImage): + container_image (google.cloud.notebooks_v1beta1.types.ContainerImage): Use a container image to start the notebook instance. 
post_startup_script (str): @@ -50,7 +50,7 @@ class Environment(proto.Message): notebook instance fully boots up. The path must be a URL or Cloud Storage path. Example: ``"gs://path-to-file/file-name"`` - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which this environment was created. """ diff --git a/google/cloud/notebooks_v1beta1/types/instance.py b/google/cloud/notebooks_v1beta1/types/instance.py index 9a775f3..636825e 100644 --- a/google/cloud/notebooks_v1beta1/types/instance.py +++ b/google/cloud/notebooks_v1beta1/types/instance.py @@ -34,10 +34,10 @@ class Instance(proto.Message): name (str): Output only. The name of this notebook instance. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` - vm_image (~.environment.VmImage): + vm_image (google.cloud.notebooks_v1beta1.types.VmImage): Use a Compute Engine VM image to start the notebook instance. - container_image (~.environment.ContainerImage): + container_image (google.cloud.notebooks_v1beta1.types.ContainerImage): Use a container image to start the notebook instance. post_startup_script (str): @@ -67,12 +67,12 @@ class Instance(proto.Message): Required. The `Compute Engine machine type `__ of this instance. - accelerator_config (~.instance.Instance.AcceleratorConfig): + accelerator_config (google.cloud.notebooks_v1beta1.types.Instance.AcceleratorConfig): The hardware accelerator used on this instance. If you use accelerators, make sure that your configuration has `enough vCPUs and memory to support the ``machine_type`` you have selected `__. - state (~.instance.Instance.State): + state (google.cloud.notebooks_v1beta1.types.Instance.State): Output only. The state of this instance. install_gpu_driver (bool): Whether the end user authorizes Google Cloud @@ -84,7 +84,7 @@ class Instance(proto.Message): Specify a custom Cloud Storage path where the GPU driver is stored. If not specified, we'll automatically choose from official GPU drivers. - boot_disk_type (~.instance.Instance.DiskType): + boot_disk_type (google.cloud.notebooks_v1beta1.types.Instance.DiskType): Input only. The type of the boot disk attached to this instance, defaults to standard persistent disk (``PD_STANDARD``). @@ -94,7 +94,7 @@ class Instance(proto.Message): 64000 GB (64 TB). The minimum recommended value is 100 GB. If not specified, this defaults to 100. - data_disk_type (~.instance.Instance.DiskType): + data_disk_type (google.cloud.notebooks_v1beta1.types.Instance.DiskType): Input only. The type of the data disk attached to this instance, defaults to standard persistent disk (``PD_STANDARD``). @@ -108,7 +108,7 @@ class Instance(proto.Message): no_remove_data_disk (bool): Input only. If true, the data disk will not be auto deleted when deleting the instance. - disk_encryption (~.instance.Instance.DiskEncryption): + disk_encryption (google.cloud.notebooks_v1beta1.types.Instance.DiskEncryption): Input only. Disk encryption method used on the boot and data disks, defaults to GMEK. kms_key (str): @@ -130,15 +130,15 @@ class Instance(proto.Message): subnet (str): The name of the subnet that this instance is in. Format: ``projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`` - labels (Sequence[~.instance.Instance.LabelsEntry]): + labels (Sequence[google.cloud.notebooks_v1beta1.types.Instance.LabelsEntry]): Labels to apply to this instance. These can be later modified by the setLabels method. 
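As a rough illustration of the Instance message documented above (field values are placeholders, and the exact field set should be checked against the generated type), a caller could construct one directly:

    from google.cloud import notebooks_v1beta1

    instance = notebooks_v1beta1.Instance(
        machine_type="n1-standard-4",
        vm_image=notebooks_v1beta1.VmImage(
            project="deeplearning-platform-release",
            image_family="common-cpu",
        ),
        boot_disk_size_gb=100,
        labels={"team": "research"},
    )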
- metadata (Sequence[~.instance.Instance.MetadataEntry]): + metadata (Sequence[google.cloud.notebooks_v1beta1.types.Instance.MetadataEntry]): Custom metadata to apply to this instance. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Instance creation time. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Instance update time. """ @@ -191,7 +191,7 @@ class AcceleratorConfig(proto.Message): combination. TPUs are not supported. Attributes: - type_ (~.instance.Instance.AcceleratorType): + type_ (google.cloud.notebooks_v1beta1.types.Instance.AcceleratorType): Type of this accelerator. core_count (int): Count of cores of this accelerator. diff --git a/google/cloud/notebooks_v1beta1/types/service.py b/google/cloud/notebooks_v1beta1/types/service.py index 75b9cc7..ddabeea 100644 --- a/google/cloud/notebooks_v1beta1/types/service.py +++ b/google/cloud/notebooks_v1beta1/types/service.py @@ -57,9 +57,9 @@ class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. Attributes: - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): The time the operation was created. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): The time the operation finished running. target (str): Server-defined resource path for the target @@ -123,7 +123,7 @@ class ListInstancesResponse(proto.Message): r"""Response for listing notebook instances. Attributes: - instances (Sequence[~.gcn_instance.Instance]): + instances (Sequence[google.cloud.notebooks_v1beta1.types.Instance]): A list of returned instances. next_page_token (str): Page token that can be used to continue @@ -170,7 +170,7 @@ class CreateInstanceRequest(proto.Message): instance_id (str): Required. User-defined unique ID of this instance. - instance (~.gcn_instance.Instance): + instance (google.cloud.notebooks_v1beta1.types.Instance): Required. The instance to be created. """ @@ -208,7 +208,7 @@ class SetInstanceAcceleratorRequest(proto.Message): name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` - type_ (~.gcn_instance.Instance.AcceleratorType): + type_ (google.cloud.notebooks_v1beta1.types.Instance.AcceleratorType): Required. Type of this accelerator. core_count (int): Required. Count of cores of this accelerator. Note that not @@ -251,7 +251,7 @@ class SetInstanceLabelsRequest(proto.Message): name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` - labels (Sequence[~.service.SetInstanceLabelsRequest.LabelsEntry]): + labels (Sequence[google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest.LabelsEntry]): Labels to apply to this instance. These can be later modified by the setLabels method @@ -323,7 +323,7 @@ class ReportInstanceInfoRequest(proto.Message): authenticating the VM. https://cloud.google.com/compute/docs/instances/verifying- instance-identity - metadata (Sequence[~.service.ReportInstanceInfoRequest.MetadataEntry]): + metadata (Sequence[google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest.MetadataEntry]): The metadata reported to Notebooks API. This will be merged to the instance metadata store """ @@ -424,7 +424,7 @@ class ListEnvironmentsResponse(proto.Message): r"""Response for listing environments. 
Attributes: - environments (Sequence[~.gcn_environment.Environment]): + environments (Sequence[google.cloud.notebooks_v1beta1.types.Environment]): A list of returned environments. next_page_token (str): A page token that can be used to continue @@ -472,7 +472,7 @@ class CreateEnvironmentRequest(proto.Message): contain only lowercase letters, numeric characters, and dashes. The first character must be a lowercase letter and the last character cannot be a dash. - environment (~.gcn_environment.Environment): + environment (google.cloud.notebooks_v1beta1.types.Environment): Required. The environment to be created. """ diff --git a/noxfile.py b/noxfile.py index 70d9c13..4d37cd3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -41,6 +44,9 @@ "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -81,18 +87,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -113,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -122,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -134,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. 
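The "-c", constraints_path arguments threaded through the nox sessions above use pip's constraints mechanism: a constraints file installs nothing by itself, it only pins the version pip may choose whenever a listed package is pulled in, so on Python 3.6 the declared minimums (google-api-core==1.22.2 and proto-plus==1.1.0, updated later in this patch) are what the tests actually exercise. A small runnable sketch of the path logic, assuming the same repository layout:

    import pathlib

    CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

    def constraints_for(python_version: str) -> str:
        # One constraints file per interpreter, e.g. testing/constraints-3.6.txt.
        return str(CURRENT_DIRECTORY / "testing" / f"constraints-{python_version}.txt")

    # Passed to pip as: pip install -e . -c <this path>
    print(constraints_for("3.6"))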
if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -154,7 +179,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -186,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa9493..f08bc22 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/setup.py b/setup.py index 1f140be..d36d61d 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.1.0", ), python_requires=">=3.6", diff --git a/synth.metadata b/synth.metadata index 596ea25..1839ad1 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-notebooks.git", - "sha": "bf6d2d927132a8c49872bce622eba5b863157f3b" + "remote": "git@github.com:googleapis/python-notebooks", + "sha": "8d668f883761f7c77d6aeeb5ef5834cd097f1ceb" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", - "internalRef": "347055288" + "sha": "95dd24960cf9f794ef583e59ad9f1fabe1c4a924", + "internalRef": "365882072" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } } ], @@ -40,94 +40,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - 
".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/multiprocessing.rst", - "docs/notebooks_v1beta1/services.rst", - "docs/notebooks_v1beta1/types.rst", - "google/cloud/notebooks/__init__.py", - "google/cloud/notebooks/py.typed", - "google/cloud/notebooks_v1beta1/__init__.py", - "google/cloud/notebooks_v1beta1/py.typed", - "google/cloud/notebooks_v1beta1/services/__init__.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/client.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py", - "google/cloud/notebooks_v1beta1/types/__init__.py", - "google/cloud/notebooks_v1beta1/types/environment.py", - "google/cloud/notebooks_v1beta1/types/instance.py", - "google/cloud/notebooks_v1beta1/types/service.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/notebooks_v1beta1/__init__.py", - "tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py" ] } \ No newline at end of file diff --git a/synth.py b/synth.py index a77bad9..8c05f67 100644 --- a/synth.py +++ b/synth.py @@ -37,7 +37,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=99, microgenerator=True) +templated_files = common.py_library(cov_level=98, microgenerator=True) s.move( 
templated_files, excludes=[".coveragerc"] ) # the microgenerator has a good coveragerc file diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index a9e749b..2de4f17 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,5 +5,5 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.0 -proto-plus==1.1.0 \ No newline at end of file +google-api-core==1.22.2 +proto-plus==1.1.0 diff --git a/tests/unit/gapic/notebooks_v1beta1/__init__.py b/tests/unit/gapic/notebooks_v1beta1/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/notebooks_v1beta1/__init__.py +++ b/tests/unit/gapic/notebooks_v1beta1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py index d0c6432..97a18c7 100644 --- a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py +++ b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py @@ -96,7 +96,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [NotebookServiceClient, NotebookServiceAsyncClient] + "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,] +) +def test_notebook_service_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "notebooks.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,] ) def test_notebook_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -106,16 +123,21 @@ def test_notebook_service_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "notebooks.googleapis.com:443" def test_notebook_service_client_get_transport_class(): transport = NotebookServiceClient.get_transport_class() - assert transport == transports.NotebookServiceGrpcTransport + available_transports = [ + transports.NotebookServiceGrpcTransport, + ] + assert transport in available_transports transport = NotebookServiceClient.get_transport_class("grpc") assert transport == transports.NotebookServiceGrpcTransport @@ -166,7 +188,7 @@ def 
test_notebook_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -182,7 +204,7 @@ def test_notebook_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -198,7 +220,7 @@ def test_notebook_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -226,7 +248,7 @@ def test_notebook_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -287,29 +309,25 @@ def test_notebook_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
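The client_cert_source_for_mtls plumbing these tests exercise is normally reached from user code through client options rather than by constructing a transport by hand. A hedged sketch, with placeholder certificate paths, and noting that GOOGLE_API_USE_CLIENT_CERTIFICATE must be set to "true" for the callback to take effect:

    from google.api_core.client_options import ClientOptions
    from google.cloud import notebooks_v1beta1

    def client_cert_source():
        # Placeholder callback returning (certificate_bytes, private_key_bytes), both PEM.
        with open("client.crt", "rb") as crt, open("client.key", "rb") as key:
            return crt.read(), key.read()

    options = ClientOptions(client_cert_source=client_cert_source)
    client = notebooks_v1beta1.NotebookServiceClient(client_options=options)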
@@ -318,66 +336,53 @@ def test_notebook_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -403,7 +408,7 @@ def test_notebook_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -433,7 +438,7 @@ def test_notebook_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -452,7 +457,7 @@ def test_notebook_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -497,6 +502,22 @@ def test_list_instances_from_dict(): test_list_instances(request_type=dict) +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.ListInstancesRequest() + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest @@ -822,6 +843,22 @@ def test_get_instance_from_dict(): test_get_instance(request_type=dict) +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.GetInstanceRequest() + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest @@ -1000,6 +1037,22 @@ def test_create_instance_from_dict(): test_create_instance(request_type=dict) +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.CreateInstanceRequest() + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest @@ -1121,6 +1174,24 @@ def test_register_instance_from_dict(): test_register_instance(request_type=dict) +def test_register_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_instance), "__call__" + ) as call: + client.register_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.RegisterInstanceRequest() + + @pytest.mark.asyncio async def test_register_instance_async( transport: str = "grpc_asyncio", request_type=service.RegisterInstanceRequest @@ -1248,6 +1319,24 @@ def test_set_instance_accelerator_from_dict(): test_set_instance_accelerator(request_type=dict) +def test_set_instance_accelerator_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_accelerator), "__call__" + ) as call: + client.set_instance_accelerator() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.SetInstanceAcceleratorRequest() + + @pytest.mark.asyncio async def test_set_instance_accelerator_async( transport: str = "grpc_asyncio", request_type=service.SetInstanceAcceleratorRequest @@ -1375,6 +1464,24 @@ def test_set_instance_machine_type_from_dict(): test_set_instance_machine_type(request_type=dict) +def test_set_instance_machine_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_machine_type), "__call__" + ) as call: + client.set_instance_machine_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.SetInstanceMachineTypeRequest() + + @pytest.mark.asyncio async def test_set_instance_machine_type_async( transport: str = "grpc_asyncio", request_type=service.SetInstanceMachineTypeRequest @@ -1502,6 +1609,24 @@ def test_set_instance_labels_from_dict(): test_set_instance_labels(request_type=dict) +def test_set_instance_labels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_labels), "__call__" + ) as call: + client.set_instance_labels() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.SetInstanceLabelsRequest() + + @pytest.mark.asyncio async def test_set_instance_labels_async( transport: str = "grpc_asyncio", request_type=service.SetInstanceLabelsRequest @@ -1627,6 +1752,22 @@ def test_delete_instance_from_dict(): test_delete_instance(request_type=dict) +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.DeleteInstanceRequest() + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest @@ -1746,6 +1887,22 @@ def test_start_instance_from_dict(): test_start_instance(request_type=dict) +def test_start_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_instance), "__call__") as call: + client.start_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.StartInstanceRequest() + + @pytest.mark.asyncio async def test_start_instance_async( transport: str = "grpc_asyncio", request_type=service.StartInstanceRequest @@ -1865,6 +2022,22 @@ def test_stop_instance_from_dict(): test_stop_instance(request_type=dict) +def test_stop_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: + client.stop_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.StopInstanceRequest() + + @pytest.mark.asyncio async def test_stop_instance_async( transport: str = "grpc_asyncio", request_type=service.StopInstanceRequest @@ -1984,6 +2157,22 @@ def test_reset_instance_from_dict(): test_reset_instance(request_type=dict) +def test_reset_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: + client.reset_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.ResetInstanceRequest() + + @pytest.mark.asyncio async def test_reset_instance_async( transport: str = "grpc_asyncio", request_type=service.ResetInstanceRequest @@ -2105,6 +2294,24 @@ def test_report_instance_info_from_dict(): test_report_instance_info(request_type=dict) +def test_report_instance_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_instance_info), "__call__" + ) as call: + client.report_instance_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.ReportInstanceInfoRequest() + + @pytest.mark.asyncio async def test_report_instance_info_async( transport: str = "grpc_asyncio", request_type=service.ReportInstanceInfoRequest @@ -2243,6 +2450,24 @@ def test_is_instance_upgradeable_from_dict(): test_is_instance_upgradeable(request_type=dict) +def test_is_instance_upgradeable_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.is_instance_upgradeable), "__call__" + ) as call: + client.is_instance_upgradeable() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.IsInstanceUpgradeableRequest() + + @pytest.mark.asyncio async def test_is_instance_upgradeable_async( transport: str = "grpc_asyncio", request_type=service.IsInstanceUpgradeableRequest @@ -2384,6 +2609,22 @@ def test_upgrade_instance_from_dict(): test_upgrade_instance(request_type=dict) +def test_upgrade_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + client.upgrade_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.UpgradeInstanceRequest() + + @pytest.mark.asyncio async def test_upgrade_instance_async( transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceRequest @@ -2505,6 +2746,24 @@ def test_upgrade_instance_internal_from_dict(): test_upgrade_instance_internal(request_type=dict) +def test_upgrade_instance_internal_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.upgrade_instance_internal), "__call__" + ) as call: + client.upgrade_instance_internal() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.UpgradeInstanceInternalRequest() + + @pytest.mark.asyncio async def test_upgrade_instance_internal_async( transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceInternalRequest @@ -2639,6 +2898,24 @@ def test_list_environments_from_dict(): test_list_environments(request_type=dict) +def test_list_environments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + client.list_environments() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.ListEnvironmentsRequest() + + @pytest.mark.asyncio async def test_list_environments_async( transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest @@ -2932,6 +3209,22 @@ def test_get_environment_from_dict(): test_get_environment(request_type=dict) +def test_get_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + client.get_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.GetEnvironmentRequest() + + @pytest.mark.asyncio async def test_get_environment_async( transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest @@ -3066,6 +3359,24 @@ def test_create_environment_from_dict(): test_create_environment(request_type=dict) +def test_create_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + client.create_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.CreateEnvironmentRequest() + + @pytest.mark.asyncio async def test_create_environment_async( transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest @@ -3193,6 +3504,24 @@ def test_delete_environment_from_dict(): test_delete_environment(request_type=dict) +def test_delete_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + client.delete_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == service.DeleteEnvironmentRequest() + + @pytest.mark.asyncio async def test_delete_environment_async( transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest @@ -3468,6 +3797,51 @@ def test_notebook_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +def test_notebook_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_notebook_service_host_no_port(): client = NotebookServiceClient( credentials=credentials.AnonymousCredentials(), @@ -3489,7 +3863,7 @@ def test_notebook_service_host_with_port(): def test_notebook_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.NotebookServiceGrpcTransport( @@ -3501,7 +3875,7 @@ def test_notebook_service_grpc_transport_channel(): def test_notebook_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.NotebookServiceGrpcAsyncIOTransport( @@ -3512,6 +3886,8 @@ def test_notebook_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -3526,7 +3902,7 @@ def test_notebook_service_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3564,6 +3940,8 @@ def test_notebook_service_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -3579,7 +3957,7 @@ def test_notebook_service_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel From 4cb05085253603f7647c0482cb8683ccb64f6859 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 1 Apr 2021 07:48:48 -0700 Subject: [PATCH 04/18] chore: start tracking obsolete files (#29) --- synth.metadata | 108 ++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 102 insertions(+), 6 deletions(-) diff --git a/synth.metadata b/synth.metadata index 1839ad1..5bd103c 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-notebooks", - "sha": "8d668f883761f7c77d6aeeb5ef5834cd097f1ceb" + "remote": "https://github.com/googleapis/python-notebooks.git", + "sha": "4999922dc0f6eaebc8aec58929176ab6b87cfdca" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "95dd24960cf9f794ef583e59ad9f1fabe1c4a924", - "internalRef": "365882072" + "sha": "915925089600094e72e4bfa8cf586c170e6b7109", + "internalRef": "366152684" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" + "sha": "a22531f8364582f51485e4e9db85872d93514ab1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" + "sha": "a22531f8364582f51485e4e9db85872d93514ab1" } } ], @@ -40,5 +40,101 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".coveragerc", + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + 
".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic-head.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic-head.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic-head.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples-against-head.sh", + ".kokoro/test-samples-impl.sh", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "docs/multiprocessing.rst", + "docs/notebooks_v1beta1/notebook_service.rst", + "docs/notebooks_v1beta1/services.rst", + "docs/notebooks_v1beta1/types.rst", + "google/cloud/notebooks/__init__.py", + "google/cloud/notebooks/py.typed", + "google/cloud/notebooks_v1beta1/__init__.py", + "google/cloud/notebooks_v1beta1/py.typed", + "google/cloud/notebooks_v1beta1/services/__init__.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/client.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py", + "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py", + "google/cloud/notebooks_v1beta1/types/__init__.py", + "google/cloud/notebooks_v1beta1/types/environment.py", + "google/cloud/notebooks_v1beta1/types/instance.py", + "google/cloud/notebooks_v1beta1/types/service.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "scripts/decrypt-secrets.sh", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/notebooks_v1beta1/__init__.py", + "tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py" ] } \ No newline at end of file From 1193643464f95d52f5e8d0982419c4a516c79d09 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 7 Apr 2021 08:20:08 -0700 Subject: [PATCH 05/18] chore: Add license headers for python config files (#30) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/d9d975fd-7d94-4e18-acf5-f81484b3e675/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) 
Source-Link: https://github.com/googleapis/synthtool/commit/5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc --- .pre-commit-config.yaml | 14 ++++++++++++++ docs/conf.py | 13 +++++++++++++ synth.metadata | 6 +++--- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32302e4..8912e9b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/docs/conf.py b/docs/conf.py index c79c15d..b072fff 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# # google-cloud-notebooks documentation build configuration file # diff --git a/synth.metadata b/synth.metadata index 5bd103c..37a5ea7 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-notebooks.git", - "sha": "4999922dc0f6eaebc8aec58929176ab6b87cfdca" + "sha": "4cb05085253603f7647c0482cb8683ccb64f6859" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a22531f8364582f51485e4e9db85872d93514ab1" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a22531f8364582f51485e4e9db85872d93514ab1" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } } ], From 0f48d18d3f285c8b153993a12bd3f74cfdd47ee0 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sun, 11 Apr 2021 04:10:57 -0700 Subject: [PATCH 06/18] chore: add constraints file check for python samples (#31) This is the sibling PR to https://github.com/GoogleCloudPlatform/python-docs-samples/pull/5611 and this is the issue opened for it https://github.com/GoogleCloudPlatform/python-docs-samples/issues/5549 --- .github/header-checker-lint.yml | 2 +- renovate.json | 5 ++++- synth.metadata | 6 +++--- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml index fc281c0..6fe78aa 100644 --- a/.github/header-checker-lint.yml +++ b/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/renovate.json b/renovate.json index f08bc22..c048955 100644 --- a/renovate.json +++ b/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/synth.metadata b/synth.metadata index 37a5ea7..be28e83 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-notebooks.git", - "sha": "4cb05085253603f7647c0482cb8683ccb64f6859" + "sha": "1193643464f95d52f5e8d0982419c4a516c79d09" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } } ], From 0ea1aa9d5dbc6dd837b31b747e7948159019c2ff Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 16 Apr 2021 17:37:56 -0400 Subject: [PATCH 07/18] chore: prevent normalization of semver versioning (#32) * chore: prevent normalization of semver versioning * chore: update workaround to make sic work --- setup.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/setup.py 
b/setup.py index d36d61d..6f949e4 100644 --- a/setup.py +++ b/setup.py @@ -19,6 +19,21 @@ import os import setuptools # type: ignore +# Disable version normalization performed by setuptools.setup() +try: + # Try the approach of using sic(), added in setuptools 46.1.0 + from setuptools import sic +except ImportError: + # Try the approach of replacing packaging.version.Version + sic = lambda v: v + try: + # setuptools >=39.0.0 uses packaging from setuptools.extern + from setuptools.extern import packaging + except ImportError: + # setuptools <39.0.0 uses packaging from pkg_resources.extern + from pkg_resources.extern import packaging + packaging.version.Version = packaging.version.LegacyVersion + version = "0.1.2" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -29,7 +44,7 @@ setuptools.setup( name="google-cloud-notebooks", - version=version, + version=sic(version), long_description=readme, author="Google LLC", author_email="googleapis-packages@google.com", From e3efd1bf82dea5b234a561bc9e3f7aa220e7451f Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 14:23:14 -0400 Subject: [PATCH 08/18] chore(revert): revert preventing normalization (#35) --- setup.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/setup.py b/setup.py index 6f949e4..d36d61d 100644 --- a/setup.py +++ b/setup.py @@ -19,21 +19,6 @@ import os import setuptools # type: ignore -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion - version = "0.1.2" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -44,7 +29,7 @@ setuptools.setup( name="google-cloud-notebooks", - version=sic(version), + version=version, long_description=readme, author="Google LLC", author_email="googleapis-packages@google.com", From 3ff89a89d66861e6796049d243e7f868c0272bdf Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 May 2021 10:52:02 -0400 Subject: [PATCH 09/18] chore: migrate to owl bot (#34) --- .github/.OwlBot.lock.yaml | 4 ++ .github/.OwlBot.yaml | 26 +++++++ .kokoro/release.sh | 4 +- .kokoro/release/common.cfg | 14 +--- docs/_static/custom.css | 13 +++- synth.py => owlbot.py | 14 ++-- synth.metadata | 140 ------------------------------------- 7 files changed, 50 insertions(+), 165 deletions(-) create mode 100644 .github/.OwlBot.lock.yaml create mode 100644 .github/.OwlBot.yaml rename synth.py => owlbot.py (72%) delete mode 100644 synth.metadata diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000..29084e8 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000..282df92 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/notebooks/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: b06c9034cfcbce180ba732d03be6526e5c8ea1bc + diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 292da3e..c5ce41e 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-notebooks python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index c07503d..9d9cb3b 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-notebooks/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/docs/_static/custom.css b/docs/_static/custom.css index bcd37bb..b0a2954 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/synth.py b/owlbot.py similarity index 72% rename from synth.py rename to owlbot.py index 8c05f67..208b5ee 100644 --- a/synth.py +++ b/owlbot.py @@ -19,20 +19,14 @@ import synthtool.gcp as gcp from synthtool.languages import python -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -# ---------------------------------------------------------------------------- -# Generate notebooks GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="notebooks", - version="v1beta1", - bazel_target="//google/cloud/notebooks/v1beta1:notebooks-v1beta1-py", +default_version = "v1beta1" -) +for library in s.get_staging_dirs(default_version): + s.move(library, 
excludes=["scripts/fixup*.py", "setup.py", "README.rst", "docs/index.rst"]) -s.move(library, excludes=["scripts/fixup*.py", "setup.py", "README.rst", "docs/index.rst"]) +s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index be28e83..0000000 --- a/synth.metadata +++ /dev/null @@ -1,140 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-notebooks.git", - "sha": "1193643464f95d52f5e8d0982419c4a516c79d09" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "915925089600094e72e4bfa8cf586c170e6b7109", - "internalRef": "366152684" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "notebooks", - "apiVersion": "v1beta1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/multiprocessing.rst", - "docs/notebooks_v1beta1/notebook_service.rst", - "docs/notebooks_v1beta1/services.rst", - "docs/notebooks_v1beta1/types.rst", - 
"google/cloud/notebooks/__init__.py", - "google/cloud/notebooks/py.typed", - "google/cloud/notebooks_v1beta1/__init__.py", - "google/cloud/notebooks_v1beta1/py.typed", - "google/cloud/notebooks_v1beta1/services/__init__.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/client.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py", - "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py", - "google/cloud/notebooks_v1beta1/types/__init__.py", - "google/cloud/notebooks_v1beta1/types/environment.py", - "google/cloud/notebooks_v1beta1/types/instance.py", - "google/cloud/notebooks_v1beta1/types/service.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/notebooks_v1beta1/__init__.py", - "tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py" - ] -} \ No newline at end of file From bd0d6442e1d5df4a979f827ceb8f9f59d8fce732 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 May 2021 12:50:02 -0400 Subject: [PATCH 10/18] chore: add library type to .repo-metadata.json (#42) --- .repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.repo-metadata.json b/.repo-metadata.json index dfdbcd2..4817b29 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "", "release_level": "beta", "language": "python", + "library_type": "GAPIC_AUTO", "repo": "googleapis/python-notebooks", "distribution_name": "google-cloud-notebooks", "api_id": "notebooks.googleapis.com" From 05718597e92dea434d4729935029de0de08ea9c9 Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Wed, 12 May 2021 13:34:06 +0000 Subject: [PATCH 11/18] chore: add SECURITY.md (#38) chore: add SECURITY.md --- SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..8b58ae9 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
From 7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 May 2021 14:42:04 +0000 Subject: [PATCH 12/18] chore: upgrade gapic-generator-python to 0.46.3 (#44) PiperOrigin-RevId: 373649163 Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0a3c7d272d697796db75857bac73905c68e498c3 fix: add async client to %name_%version/init.py chore: add autogenerated snippets chore: remove auth, policy, and options from the reserved names list feat: support self-signed JWT flow for service accounts chore: enable GAPIC metadata generation chore: sort subpackages in %namespace/%name/init.py --- docs/notebooks_v1beta1/notebook_service.rst | 1 - google/cloud/notebooks/__init__.py | 18 +- google/cloud/notebooks_v1beta1/__init__.py | 7 +- .../notebooks_v1beta1/gapic_metadata.json | 213 +++++ .../notebooks_v1beta1/services/__init__.py | 1 - .../services/notebook_service/__init__.py | 2 - .../services/notebook_service/async_client.py | 70 +- .../services/notebook_service/client.py | 111 +-- .../services/notebook_service/pagers.py | 6 +- .../notebook_service/transports/__init__.py | 2 - .../notebook_service/transports/base.py | 185 +++-- .../notebook_service/transports/grpc.py | 80 +- .../transports/grpc_asyncio.py | 93 ++- .../cloud/notebooks_v1beta1/types/__init__.py | 2 - .../notebooks_v1beta1/types/environment.py | 34 +- .../cloud/notebooks_v1beta1/types/instance.py | 79 +- .../cloud/notebooks_v1beta1/types/service.py | 150 ++-- tests/__init__.py | 15 + tests/unit/__init__.py | 15 + tests/unit/gapic/__init__.py | 15 + .../unit/gapic/notebooks_v1beta1/__init__.py | 1 - .../test_notebook_service.py | 782 ++++++++++-------- 22 files changed, 1050 insertions(+), 832 deletions(-) create mode 100644 google/cloud/notebooks_v1beta1/gapic_metadata.json create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/gapic/__init__.py diff --git a/docs/notebooks_v1beta1/notebook_service.rst b/docs/notebooks_v1beta1/notebook_service.rst index 6f8cc21..79a8fea 100644 --- a/docs/notebooks_v1beta1/notebook_service.rst +++ b/docs/notebooks_v1beta1/notebook_service.rst @@ -5,7 +5,6 @@ NotebookService :members: :inherited-members: - .. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service.pagers :members: :inherited-members: diff --git a/google/cloud/notebooks/__init__.py b/google/cloud/notebooks/__init__.py index 70533b4..2d3bf90 100644 --- a/google/cloud/notebooks/__init__.py +++ b/google/cloud/notebooks/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,12 +14,13 @@ # limitations under the License. 
# -from google.cloud.notebooks_v1beta1.services.notebook_service.async_client import ( - NotebookServiceAsyncClient, -) from google.cloud.notebooks_v1beta1.services.notebook_service.client import ( NotebookServiceClient, ) +from google.cloud.notebooks_v1beta1.services.notebook_service.async_client import ( + NotebookServiceAsyncClient, +) + from google.cloud.notebooks_v1beta1.types.environment import ContainerImage from google.cloud.notebooks_v1beta1.types.environment import Environment from google.cloud.notebooks_v1beta1.types.environment import VmImage @@ -50,23 +50,24 @@ from google.cloud.notebooks_v1beta1.types.service import UpgradeInstanceRequest __all__ = ( + "NotebookServiceClient", + "NotebookServiceAsyncClient", "ContainerImage", + "Environment", + "VmImage", + "Instance", "CreateEnvironmentRequest", "CreateInstanceRequest", "DeleteEnvironmentRequest", "DeleteInstanceRequest", - "Environment", "GetEnvironmentRequest", "GetInstanceRequest", - "Instance", "IsInstanceUpgradeableRequest", "IsInstanceUpgradeableResponse", "ListEnvironmentsRequest", "ListEnvironmentsResponse", "ListInstancesRequest", "ListInstancesResponse", - "NotebookServiceAsyncClient", - "NotebookServiceClient", "OperationMetadata", "RegisterInstanceRequest", "ReportInstanceInfoRequest", @@ -78,5 +79,4 @@ "StopInstanceRequest", "UpgradeInstanceInternalRequest", "UpgradeInstanceRequest", - "VmImage", ) diff --git a/google/cloud/notebooks_v1beta1/__init__.py b/google/cloud/notebooks_v1beta1/__init__.py index 707961a..af872cb 100644 --- a/google/cloud/notebooks_v1beta1/__init__.py +++ b/google/cloud/notebooks_v1beta1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.notebook_service import NotebookServiceClient +from .services.notebook_service import NotebookServiceAsyncClient + from .types.environment import ContainerImage from .types.environment import Environment from .types.environment import VmImage @@ -44,8 +45,8 @@ from .types.service import UpgradeInstanceInternalRequest from .types.service import UpgradeInstanceRequest - __all__ = ( + "NotebookServiceAsyncClient", "ContainerImage", "CreateEnvironmentRequest", "CreateInstanceRequest", @@ -61,6 +62,7 @@ "ListEnvironmentsResponse", "ListInstancesRequest", "ListInstancesResponse", + "NotebookServiceClient", "OperationMetadata", "RegisterInstanceRequest", "ReportInstanceInfoRequest", @@ -73,5 +75,4 @@ "UpgradeInstanceInternalRequest", "UpgradeInstanceRequest", "VmImage", - "NotebookServiceClient", ) diff --git a/google/cloud/notebooks_v1beta1/gapic_metadata.json b/google/cloud/notebooks_v1beta1/gapic_metadata.json new file mode 100644 index 0000000..b435e9c --- /dev/null +++ b/google/cloud/notebooks_v1beta1/gapic_metadata.json @@ -0,0 +1,213 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.notebooks_v1beta1", + "protoPackage": "google.cloud.notebooks.v1beta1", + "schema": "1.0", + "services": { + "NotebookService": { + "clients": { + "grpc": { + "libraryClient": "NotebookServiceClient", + "rpcs": { + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetEnvironment": { + "methods": [ + 
"get_environment" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "IsInstanceUpgradeable": { + "methods": [ + "is_instance_upgradeable" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "RegisterInstance": { + "methods": [ + "register_instance" + ] + }, + "ReportInstanceInfo": { + "methods": [ + "report_instance_info" + ] + }, + "ResetInstance": { + "methods": [ + "reset_instance" + ] + }, + "SetInstanceAccelerator": { + "methods": [ + "set_instance_accelerator" + ] + }, + "SetInstanceLabels": { + "methods": [ + "set_instance_labels" + ] + }, + "SetInstanceMachineType": { + "methods": [ + "set_instance_machine_type" + ] + }, + "StartInstance": { + "methods": [ + "start_instance" + ] + }, + "StopInstance": { + "methods": [ + "stop_instance" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + }, + "UpgradeInstanceInternal": { + "methods": [ + "upgrade_instance_internal" + ] + } + } + }, + "grpc-async": { + "libraryClient": "NotebookServiceAsyncClient", + "rpcs": { + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "IsInstanceUpgradeable": { + "methods": [ + "is_instance_upgradeable" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "RegisterInstance": { + "methods": [ + "register_instance" + ] + }, + "ReportInstanceInfo": { + "methods": [ + "report_instance_info" + ] + }, + "ResetInstance": { + "methods": [ + "reset_instance" + ] + }, + "SetInstanceAccelerator": { + "methods": [ + "set_instance_accelerator" + ] + }, + "SetInstanceLabels": { + "methods": [ + "set_instance_labels" + ] + }, + "SetInstanceMachineType": { + "methods": [ + "set_instance_machine_type" + ] + }, + "StartInstance": { + "methods": [ + "start_instance" + ] + }, + "StopInstance": { + "methods": [ + "stop_instance" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + }, + "UpgradeInstanceInternal": { + "methods": [ + "upgrade_instance_internal" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/notebooks_v1beta1/services/__init__.py b/google/cloud/notebooks_v1beta1/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/notebooks_v1beta1/services/__init__.py +++ b/google/cloud/notebooks_v1beta1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py b/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py index 92c4e29..a17402b 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import NotebookServiceClient from .async_client import NotebookServiceAsyncClient diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py index 98eb73d..18b6efd 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,10 +20,10 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore @@ -34,9 +32,8 @@ from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport from .client import NotebookServiceClient @@ -54,31 +51,26 @@ class NotebookServiceAsyncClient: parse_environment_path = staticmethod(NotebookServiceClient.parse_environment_path) instance_path = staticmethod(NotebookServiceClient.instance_path) parse_instance_path = staticmethod(NotebookServiceClient.parse_instance_path) - common_billing_account_path = staticmethod( NotebookServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( NotebookServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(NotebookServiceClient.common_folder_path) parse_common_folder_path = staticmethod( NotebookServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( NotebookServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( NotebookServiceClient.parse_common_organization_path ) - common_project_path = staticmethod(NotebookServiceClient.common_project_path) parse_common_project_path = staticmethod( NotebookServiceClient.parse_common_project_path ) - common_location_path = staticmethod(NotebookServiceClient.common_location_path) parse_common_location_path = staticmethod( NotebookServiceClient.parse_common_location_path @@ -86,7 +78,8 @@ class NotebookServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. 
@@ -101,7 +94,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -118,7 +111,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> NotebookServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: NotebookServiceTransport: The transport used by the client instance. @@ -132,12 +125,12 @@ def transport(self) -> NotebookServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, NotebookServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the notebook service client. + """Instantiates the notebook service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -169,7 +162,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = NotebookServiceClient( credentials=credentials, transport=transport, @@ -191,7 +183,6 @@ async def list_instances( request (:class:`google.cloud.notebooks_v1beta1.types.ListInstancesRequest`): The request object. Request for listing notebook instances. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -208,7 +199,6 @@ async def list_instances( """ # Create or coerce a protobuf request object. - request = service.ListInstancesRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -251,7 +241,6 @@ async def get_instance( request (:class:`google.cloud.notebooks_v1beta1.types.GetInstanceRequest`): The request object. Request for getting a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -265,7 +254,6 @@ async def get_instance( """ # Create or coerce a protobuf request object. - request = service.GetInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -303,7 +291,6 @@ async def create_instance( request (:class:`google.cloud.notebooks_v1beta1.types.CreateInstanceRequest`): The request object. Request for creating a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -320,7 +307,6 @@ async def create_instance( """ # Create or coerce a protobuf request object. - request = service.CreateInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -370,7 +356,6 @@ async def register_instance( request (:class:`google.cloud.notebooks_v1beta1.types.RegisterInstanceRequest`): The request object. Request for registering a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -387,7 +372,6 @@ async def register_instance( """ # Create or coerce a protobuf request object. 
- request = service.RegisterInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -432,7 +416,6 @@ async def set_instance_accelerator( request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceAcceleratorRequest`): The request object. Request for setting instance accelerator. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -449,7 +432,6 @@ async def set_instance_accelerator( """ # Create or coerce a protobuf request object. - request = service.SetInstanceAcceleratorRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -494,7 +476,6 @@ async def set_instance_machine_type( request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceMachineTypeRequest`): The request object. Request for setting instance machine type. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -511,7 +492,6 @@ async def set_instance_machine_type( """ # Create or coerce a protobuf request object. - request = service.SetInstanceMachineTypeRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -555,7 +535,6 @@ async def set_instance_labels( Args: request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest`): The request object. Request for setting instance labels. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -572,7 +551,6 @@ async def set_instance_labels( """ # Create or coerce a protobuf request object. - request = service.SetInstanceLabelsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -617,7 +595,6 @@ async def delete_instance( request (:class:`google.cloud.notebooks_v1beta1.types.DeleteInstanceRequest`): The request object. Request for deleting a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -644,7 +621,6 @@ async def delete_instance( """ # Create or coerce a protobuf request object. - request = service.DeleteInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -668,7 +644,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=service.OperationMetadata, ) @@ -689,7 +665,6 @@ async def start_instance( request (:class:`google.cloud.notebooks_v1beta1.types.StartInstanceRequest`): The request object. Request for starting a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -706,7 +681,6 @@ async def start_instance( """ # Create or coerce a protobuf request object. - request = service.StartInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -751,7 +725,6 @@ async def stop_instance( request (:class:`google.cloud.notebooks_v1beta1.types.StopInstanceRequest`): The request object. Request for stopping a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -768,7 +741,6 @@ async def stop_instance( """ # Create or coerce a protobuf request object. 
- request = service.StopInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -813,7 +785,6 @@ async def reset_instance( request (:class:`google.cloud.notebooks_v1beta1.types.ResetInstanceRequest`): The request object. Request for reseting a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -830,7 +801,6 @@ async def reset_instance( """ # Create or coerce a protobuf request object. - request = service.ResetInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -879,7 +849,6 @@ async def report_instance_info( request (:class:`google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest`): The request object. Request for notebook instances to report information to Notebooks API. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -896,7 +865,6 @@ async def report_instance_info( """ # Create or coerce a protobuf request object. - request = service.ReportInstanceInfoRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -941,7 +909,6 @@ async def is_instance_upgradeable( request (:class:`google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest`): The request object. Request for checking if a notebook instance is upgradeable. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -955,7 +922,6 @@ async def is_instance_upgradeable( """ # Create or coerce a protobuf request object. - request = service.IsInstanceUpgradeableRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -994,7 +960,6 @@ async def upgrade_instance( request (:class:`google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest`): The request object. Request for upgrading a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1011,7 +976,6 @@ async def upgrade_instance( """ # Create or coerce a protobuf request object. - request = service.UpgradeInstanceRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1058,7 +1022,6 @@ async def upgrade_instance_internal( request (:class:`google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest`): The request object. Request for upgrading a notebook instance from within the VM - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1075,7 +1038,6 @@ async def upgrade_instance_internal( """ # Create or coerce a protobuf request object. - request = service.UpgradeInstanceInternalRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1119,7 +1081,6 @@ async def list_environments( Args: request (:class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest`): The request object. Request for listing environments. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1135,7 +1096,6 @@ async def list_environments( """ # Create or coerce a protobuf request object. 
- request = service.ListEnvironmentsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1178,7 +1138,6 @@ async def get_environment( request (:class:`google.cloud.notebooks_v1beta1.types.GetEnvironmentRequest`): The request object. Request for getting a notebook environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1193,7 +1152,6 @@ async def get_environment( """ # Create or coerce a protobuf request object. - request = service.GetEnvironmentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1230,7 +1188,6 @@ async def create_environment( request (:class:`google.cloud.notebooks_v1beta1.types.CreateEnvironmentRequest`): The request object. Request for creating a notebook environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1246,7 +1203,6 @@ async def create_environment( """ # Create or coerce a protobuf request object. - request = service.CreateEnvironmentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1291,7 +1247,6 @@ async def delete_environment( request (:class:`google.cloud.notebooks_v1beta1.types.DeleteEnvironmentRequest`): The request object. Request for deleting a notebook environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1318,7 +1273,6 @@ async def delete_environment( """ # Create or coerce a protobuf request object. - request = service.DeleteEnvironmentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1342,7 +1296,7 @@ async def delete_environment( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=service.OperationMetadata, ) diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py index 97acda4..3ab6f94 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -38,9 +36,8 @@ from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import NotebookServiceGrpcTransport from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport @@ -61,7 +58,7 @@ class NotebookServiceClientMeta(type): _transport_registry["grpc_asyncio"] = NotebookServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[NotebookServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -84,7 +81,8 @@ class NotebookServiceClient(metaclass=NotebookServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -118,7 +116,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -135,7 +134,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -154,23 +153,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> NotebookServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - NotebookServiceTransport: The transport used by the client instance. + NotebookServiceTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def environment_path(project: str, environment: str,) -> str: - """Return a fully-qualified environment string.""" + """Returns a fully-qualified environment string.""" return "projects/{project}/environments/{environment}".format( project=project, environment=environment, ) @staticmethod def parse_environment_path(path: str) -> Dict[str, str]: - """Parse a environment path into its component segments.""" + """Parses a environment path into its component segments.""" m = re.match( r"^projects/(?P.+?)/environments/(?P.+?)$", path ) @@ -178,20 +178,20 @@ def parse_environment_path(path: str) -> Dict[str, str]: @staticmethod def instance_path(project: str, instance: str,) -> str: - """Return a fully-qualified instance string.""" + """Returns a fully-qualified instance string.""" return "projects/{project}/instances/{instance}".format( project=project, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parse a instance path into its component segments.""" + """Parses a instance path into its component segments.""" m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -204,7 +204,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -215,7 +215,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -226,7 +226,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -237,7 +237,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -251,12 +251,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, NotebookServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the notebook service client. + """Instantiates the notebook service client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -311,9 +311,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -325,12 +326,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -345,8 +348,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -375,7 +378,6 @@ def list_instances( request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest): The request object. Request for listing notebook instances. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -392,7 +394,6 @@ def list_instances( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.ListInstancesRequest. # There's no risk of modifying the input as we've already verified @@ -436,7 +437,6 @@ def get_instance( request (google.cloud.notebooks_v1beta1.types.GetInstanceRequest): The request object. Request for getting a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -450,7 +450,6 @@ def get_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.GetInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -489,7 +488,6 @@ def create_instance( request (google.cloud.notebooks_v1beta1.types.CreateInstanceRequest): The request object. Request for creating a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -506,7 +504,6 @@ def create_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.CreateInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -557,7 +554,6 @@ def register_instance( request (google.cloud.notebooks_v1beta1.types.RegisterInstanceRequest): The request object. Request for registering a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -574,7 +570,6 @@ def register_instance( """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes # in a service.RegisterInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -620,7 +615,6 @@ def set_instance_accelerator( request (google.cloud.notebooks_v1beta1.types.SetInstanceAcceleratorRequest): The request object. Request for setting instance accelerator. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -637,7 +631,6 @@ def set_instance_accelerator( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.SetInstanceAcceleratorRequest. # There's no risk of modifying the input as we've already verified @@ -683,7 +676,6 @@ def set_instance_machine_type( request (google.cloud.notebooks_v1beta1.types.SetInstanceMachineTypeRequest): The request object. Request for setting instance machine type. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -700,7 +692,6 @@ def set_instance_machine_type( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.SetInstanceMachineTypeRequest. # There's no risk of modifying the input as we've already verified @@ -747,7 +738,6 @@ def set_instance_labels( Args: request (google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest): The request object. Request for setting instance labels. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -764,7 +754,6 @@ def set_instance_labels( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.SetInstanceLabelsRequest. # There's no risk of modifying the input as we've already verified @@ -810,7 +799,6 @@ def delete_instance( request (google.cloud.notebooks_v1beta1.types.DeleteInstanceRequest): The request object. Request for deleting a notebook instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -837,7 +825,6 @@ def delete_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.DeleteInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -862,7 +849,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=service.OperationMetadata, ) @@ -883,7 +870,6 @@ def start_instance( request (google.cloud.notebooks_v1beta1.types.StartInstanceRequest): The request object. Request for starting a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -900,7 +886,6 @@ def start_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.StartInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -946,7 +931,6 @@ def stop_instance( request (google.cloud.notebooks_v1beta1.types.StopInstanceRequest): The request object. 
Request for stopping a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -963,7 +947,6 @@ def stop_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.StopInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -1009,7 +992,6 @@ def reset_instance( request (google.cloud.notebooks_v1beta1.types.ResetInstanceRequest): The request object. Request for reseting a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1026,7 +1008,6 @@ def reset_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.ResetInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -1076,7 +1057,6 @@ def report_instance_info( request (google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest): The request object. Request for notebook instances to report information to Notebooks API. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1093,7 +1073,6 @@ def report_instance_info( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.ReportInstanceInfoRequest. # There's no risk of modifying the input as we've already verified @@ -1139,7 +1118,6 @@ def is_instance_upgradeable( request (google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest): The request object. Request for checking if a notebook instance is upgradeable. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1153,7 +1131,6 @@ def is_instance_upgradeable( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.IsInstanceUpgradeableRequest. # There's no risk of modifying the input as we've already verified @@ -1193,7 +1170,6 @@ def upgrade_instance( request (google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest): The request object. Request for upgrading a notebook instance - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1210,7 +1186,6 @@ def upgrade_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.UpgradeInstanceRequest. # There's no risk of modifying the input as we've already verified @@ -1258,7 +1233,6 @@ def upgrade_instance_internal( request (google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest): The request object. Request for upgrading a notebook instance from within the VM - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1275,7 +1249,6 @@ def upgrade_instance_internal( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.UpgradeInstanceInternalRequest. 
# There's no risk of modifying the input as we've already verified @@ -1322,7 +1295,6 @@ def list_environments( Args: request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest): The request object. Request for listing environments. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1338,7 +1310,6 @@ def list_environments( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.ListEnvironmentsRequest. # There's no risk of modifying the input as we've already verified @@ -1382,7 +1353,6 @@ def get_environment( request (google.cloud.notebooks_v1beta1.types.GetEnvironmentRequest): The request object. Request for getting a notebook environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1397,7 +1367,6 @@ def get_environment( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.GetEnvironmentRequest. # There's no risk of modifying the input as we've already verified @@ -1435,7 +1404,6 @@ def create_environment( request (google.cloud.notebooks_v1beta1.types.CreateEnvironmentRequest): The request object. Request for creating a notebook environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1451,7 +1419,6 @@ def create_environment( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.CreateEnvironmentRequest. # There's no risk of modifying the input as we've already verified @@ -1497,7 +1464,6 @@ def delete_environment( request (google.cloud.notebooks_v1beta1.types.DeleteEnvironmentRequest): The request object. Request for deleting a notebook environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1524,7 +1490,6 @@ def delete_environment( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a service.DeleteEnvironmentRequest. # There's no risk of modifying the input as we've already verified @@ -1549,7 +1514,7 @@ def delete_environment( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=service.OperationMetadata, ) diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py b/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py index ec305f6..d918331 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and @@ -247,7 +245,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py index bc1ba94..9c6f2a1 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py index d72ee7b..affbf2d 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -39,27 +38,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class NotebookServiceTransport(abc.ABC): """Abstract transport class for NotebookService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "notebooks.googleapis.com" + def __init__( self, *, - host: str = "notebooks.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - 
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -68,7 +81,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -82,29 +95,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
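The TODO comments above introduce two version-gated helpers whose full definitions follow just below. As an illustration of the first one, here is a standalone sketch (not the generated transport code itself) that probes the installed google-auth release and builds the keyword arguments later passed to google.auth.default(); the scope constant mirrors this package's AUTH_SCOPES, and the final print exists only to make the sketch runnable on its own:

    # Illustrative sketch of the version-gated scopes kwargs described above.
    from typing import Dict, Optional, Sequence

    import packaging.version
    import pkg_resources
    import google.auth  # type: ignore

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    try:
        # google.auth.__version__ was only added in google-auth 1.26.0.
        _GOOGLE_AUTH_VERSION = google.auth.__version__
    except AttributeError:
        try:
            _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
        except pkg_resources.DistributionNotFound:
            _GOOGLE_AUTH_VERSION = None

    def scopes_kwargs(scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
        # google-auth >= 1.25.0 understands both ``scopes`` and ``default_scopes``;
        # older releases only accept ``scopes``.
        if _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        ):
            return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
        return {"scopes": scopes or AUTH_SCOPES}

    if __name__ == "__main__":
        # The transport then calls, e.g.:
        #   google.auth.default(**scopes_kwargs(None), quota_project_id=None)
        print(scopes_kwargs(None))

The second helper, keyed on google-api-core 1.26.0, follows the same shape and is exercised by the create_channel changes further down in this patch.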
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -185,122 +245,119 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> typing.Callable[ + ) -> Callable[ [service.ListInstancesRequest], - typing.Union[ - service.ListInstancesResponse, - typing.Awaitable[service.ListInstancesResponse], - ], + Union[service.ListInstancesResponse, Awaitable[service.ListInstancesResponse]], ]: raise NotImplementedError() @property def get_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.GetInstanceRequest], - typing.Union[instance.Instance, typing.Awaitable[instance.Instance]], + Union[instance.Instance, Awaitable[instance.Instance]], ]: raise NotImplementedError() @property def create_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.CreateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def register_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.RegisterInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def set_instance_accelerator( self, - ) -> typing.Callable[ + ) -> Callable[ [service.SetInstanceAcceleratorRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def set_instance_machine_type( self, - ) -> typing.Callable[ + ) -> Callable[ [service.SetInstanceMachineTypeRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def set_instance_labels( self, - ) -> typing.Callable[ + ) -> Callable[ [service.SetInstanceLabelsRequest], - 
typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.DeleteInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def start_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.StartInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def stop_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.StopInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def reset_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.ResetInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def report_instance_info( self, - ) -> typing.Callable[ + ) -> Callable[ [service.ReportInstanceInfoRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def is_instance_upgradeable( self, - ) -> typing.Callable[ + ) -> Callable[ [service.IsInstanceUpgradeableRequest], - typing.Union[ + Union[ service.IsInstanceUpgradeableResponse, - typing.Awaitable[service.IsInstanceUpgradeableResponse], + Awaitable[service.IsInstanceUpgradeableResponse], ], ]: raise NotImplementedError() @@ -308,29 +365,29 @@ def is_instance_upgradeable( @property def upgrade_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [service.UpgradeInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def upgrade_instance_internal( self, - ) -> typing.Callable[ + ) -> Callable[ [service.UpgradeInstanceInternalRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_environments( self, - ) -> typing.Callable[ + ) -> Callable[ [service.ListEnvironmentsRequest], - typing.Union[ + Union[ service.ListEnvironmentsResponse, - typing.Awaitable[service.ListEnvironmentsResponse], + Awaitable[service.ListEnvironmentsResponse], ], ]: raise NotImplementedError() @@ -338,29 +395,27 @@ def list_environments( @property def get_environment( self, - ) -> typing.Callable[ + ) -> Callable[ [service.GetEnvironmentRequest], - typing.Union[ - environment.Environment, typing.Awaitable[environment.Environment] - ], + Union[environment.Environment, Awaitable[environment.Environment]], ]: raise NotImplementedError() @property def create_environment( self, - ) -> typing.Callable[ + ) -> Callable[ [service.CreateEnvironmentRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], 
]: raise NotImplementedError() @property def delete_environment( self, - ) -> typing.Callable[ + ) -> Callable[ [service.DeleteEnvironmentRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py index 5aed906..fe6a527 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -30,8 +28,7 @@ from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "notebooks.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -68,7 +65,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -179,7 +177,7 @@ def __init__( def create_channel( cls, host: str = "notebooks.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +208,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -293,7 +293,7 @@ def get_instance(self) -> Callable[[service.GetInstanceRequest], instance.Instan @property def create_instance( self, - ) -> Callable[[service.CreateInstanceRequest], operations.Operation]: + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given project and @@ -313,14 +313,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/CreateInstance", request_serializer=service.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def register_instance( self, - ) -> Callable[[service.RegisterInstanceRequest], operations.Operation]: + ) -> Callable[[service.RegisterInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the register instance method over gRPC. Registers an existing legacy notebook instance to the @@ -344,14 +344,14 @@ def register_instance( self._stubs["register_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/RegisterInstance", request_serializer=service.RegisterInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["register_instance"] @property def set_instance_accelerator( self, - ) -> Callable[[service.SetInstanceAcceleratorRequest], operations.Operation]: + ) -> Callable[[service.SetInstanceAcceleratorRequest], operations_pb2.Operation]: r"""Return a callable for the set instance accelerator method over gRPC. Updates the guest accelerators of a single Instance. @@ -370,14 +370,14 @@ def set_instance_accelerator( self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceAccelerator", request_serializer=service.SetInstanceAcceleratorRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_accelerator"] @property def set_instance_machine_type( self, - ) -> Callable[[service.SetInstanceMachineTypeRequest], operations.Operation]: + ) -> Callable[[service.SetInstanceMachineTypeRequest], operations_pb2.Operation]: r"""Return a callable for the set instance machine type method over gRPC. Updates the machine type of a single Instance. 
@@ -396,14 +396,14 @@ def set_instance_machine_type( self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceMachineType", request_serializer=service.SetInstanceMachineTypeRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_machine_type"] @property def set_instance_labels( self, - ) -> Callable[[service.SetInstanceLabelsRequest], operations.Operation]: + ) -> Callable[[service.SetInstanceLabelsRequest], operations_pb2.Operation]: r"""Return a callable for the set instance labels method over gRPC. Updates the labels of an Instance. @@ -422,14 +422,14 @@ def set_instance_labels( self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceLabels", request_serializer=service.SetInstanceLabelsRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_labels"] @property def delete_instance( self, - ) -> Callable[[service.DeleteInstanceRequest], operations.Operation]: + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. @@ -448,14 +448,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/DeleteInstance", request_serializer=service.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def start_instance( self, - ) -> Callable[[service.StartInstanceRequest], operations.Operation]: + ) -> Callable[[service.StartInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the start instance method over gRPC. Starts a notebook instance. @@ -474,14 +474,14 @@ def start_instance( self._stubs["start_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/StartInstance", request_serializer=service.StartInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["start_instance"] @property def stop_instance( self, - ) -> Callable[[service.StopInstanceRequest], operations.Operation]: + ) -> Callable[[service.StopInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the stop instance method over gRPC. Stops a notebook instance. @@ -500,14 +500,14 @@ def stop_instance( self._stubs["stop_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/StopInstance", request_serializer=service.StopInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["stop_instance"] @property def reset_instance( self, - ) -> Callable[[service.ResetInstanceRequest], operations.Operation]: + ) -> Callable[[service.ResetInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the reset instance method over gRPC. Resets a notebook instance. 
@@ -526,14 +526,14 @@ def reset_instance( self._stubs["reset_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ResetInstance", request_serializer=service.ResetInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["reset_instance"] @property def report_instance_info( self, - ) -> Callable[[service.ReportInstanceInfoRequest], operations.Operation]: + ) -> Callable[[service.ReportInstanceInfoRequest], operations_pb2.Operation]: r"""Return a callable for the report instance info method over gRPC. Allows notebook instances to @@ -556,7 +556,7 @@ def report_instance_info( self._stubs["report_instance_info"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ReportInstanceInfo", request_serializer=service.ReportInstanceInfoRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["report_instance_info"] @@ -591,7 +591,7 @@ def is_instance_upgradeable( @property def upgrade_instance( self, - ) -> Callable[[service.UpgradeInstanceRequest], operations.Operation]: + ) -> Callable[[service.UpgradeInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the upgrade instance method over gRPC. Upgrades a notebook instance to the latest version. @@ -610,14 +610,14 @@ def upgrade_instance( self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstance", request_serializer=service.UpgradeInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["upgrade_instance"] @property def upgrade_instance_internal( self, - ) -> Callable[[service.UpgradeInstanceInternalRequest], operations.Operation]: + ) -> Callable[[service.UpgradeInstanceInternalRequest], operations_pb2.Operation]: r"""Return a callable for the upgrade instance internal method over gRPC. Allows notebook instances to @@ -638,7 +638,7 @@ def upgrade_instance_internal( self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstanceInternal", request_serializer=service.UpgradeInstanceInternalRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["upgrade_instance_internal"] @@ -697,7 +697,7 @@ def get_environment( @property def create_environment( self, - ) -> Callable[[service.CreateEnvironmentRequest], operations.Operation]: + ) -> Callable[[service.CreateEnvironmentRequest], operations_pb2.Operation]: r"""Return a callable for the create environment method over gRPC. Creates a new Environment. 
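Every stub in this file now registers ``operations_pb2.Operation.FromString`` as its response deserializer instead of the old ``operations`` alias; the wire behaviour is identical because only the import name changed. A tiny self-contained round trip showing the callable the stubs rely on (no gRPC channel required; the operation name below is a placeholder):

    # Self-contained round trip through the deserializer registered above.
    from google.longrunning import operations_pb2  # type: ignore

    op = operations_pb2.Operation(name="operations/demo-123", done=False)
    wire_bytes = op.SerializeToString()

    # This classmethod is exactly what the stubs pass as response_deserializer.
    parsed = operations_pb2.Operation.FromString(wire_bytes)
    print(parsed.name, parsed.done)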
@@ -716,14 +716,14 @@ def create_environment( self._stubs["create_environment"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/CreateEnvironment", request_serializer=service.CreateEnvironmentRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_environment"] @property def delete_environment( self, - ) -> Callable[[service.DeleteEnvironmentRequest], operations.Operation]: + ) -> Callable[[service.DeleteEnvironmentRequest], operations_pb2.Operation]: r"""Return a callable for the delete environment method over gRPC. Deletes a single Environment. @@ -742,7 +742,7 @@ def delete_environment( self._stubs["delete_environment"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/DeleteEnvironment", request_serializer=service.DeleteEnvironmentRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_environment"] diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py index 9cf81f5..a7b0fc3 100644 --- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py +++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -31,8 +29,7 @@ from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO from .grpc import NotebookServiceGrpcTransport @@ -57,7 +54,7 @@ class NotebookServiceGrpcAsyncIOTransport(NotebookServiceTransport): def create_channel( cls, host: str = "notebooks.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "notebooks.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -112,7 +111,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -171,7 +171,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -303,7 +302,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given project and @@ -323,14 +322,16 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/CreateInstance", request_serializer=service.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def register_instance( self, - ) -> Callable[[service.RegisterInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [service.RegisterInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the register instance method over gRPC. Registers an existing legacy notebook instance to the @@ -354,7 +355,7 @@ def register_instance( self._stubs["register_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/RegisterInstance", request_serializer=service.RegisterInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["register_instance"] @@ -362,7 +363,7 @@ def register_instance( def set_instance_accelerator( self, ) -> Callable[ - [service.SetInstanceAcceleratorRequest], Awaitable[operations.Operation] + [service.SetInstanceAcceleratorRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the set instance accelerator method over gRPC. 
@@ -382,7 +383,7 @@ def set_instance_accelerator( self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceAccelerator", request_serializer=service.SetInstanceAcceleratorRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_accelerator"] @@ -390,7 +391,7 @@ def set_instance_accelerator( def set_instance_machine_type( self, ) -> Callable[ - [service.SetInstanceMachineTypeRequest], Awaitable[operations.Operation] + [service.SetInstanceMachineTypeRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the set instance machine type method over gRPC. @@ -410,14 +411,16 @@ def set_instance_machine_type( self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceMachineType", request_serializer=service.SetInstanceMachineTypeRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_machine_type"] @property def set_instance_labels( self, - ) -> Callable[[service.SetInstanceLabelsRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [service.SetInstanceLabelsRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the set instance labels method over gRPC. Updates the labels of an Instance. @@ -436,14 +439,14 @@ def set_instance_labels( self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceLabels", request_serializer=service.SetInstanceLabelsRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_labels"] @property def delete_instance( self, - ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. @@ -462,14 +465,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/DeleteInstance", request_serializer=service.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def start_instance( self, - ) -> Callable[[service.StartInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[[service.StartInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the start instance method over gRPC. Starts a notebook instance. 
@@ -488,14 +491,14 @@ def start_instance( self._stubs["start_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/StartInstance", request_serializer=service.StartInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["start_instance"] @property def stop_instance( self, - ) -> Callable[[service.StopInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[[service.StopInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the stop instance method over gRPC. Stops a notebook instance. @@ -514,14 +517,14 @@ def stop_instance( self._stubs["stop_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/StopInstance", request_serializer=service.StopInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["stop_instance"] @property def reset_instance( self, - ) -> Callable[[service.ResetInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[[service.ResetInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the reset instance method over gRPC. Resets a notebook instance. @@ -540,14 +543,16 @@ def reset_instance( self._stubs["reset_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ResetInstance", request_serializer=service.ResetInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["reset_instance"] @property def report_instance_info( self, - ) -> Callable[[service.ReportInstanceInfoRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [service.ReportInstanceInfoRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the report instance info method over gRPC. Allows notebook instances to @@ -570,7 +575,7 @@ def report_instance_info( self._stubs["report_instance_info"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ReportInstanceInfo", request_serializer=service.ReportInstanceInfoRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["report_instance_info"] @@ -606,7 +611,9 @@ def is_instance_upgradeable( @property def upgrade_instance( self, - ) -> Callable[[service.UpgradeInstanceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [service.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the upgrade instance method over gRPC. Upgrades a notebook instance to the latest version. 
@@ -625,7 +632,7 @@ def upgrade_instance( self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstance", request_serializer=service.UpgradeInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["upgrade_instance"] @@ -633,7 +640,7 @@ def upgrade_instance( def upgrade_instance_internal( self, ) -> Callable[ - [service.UpgradeInstanceInternalRequest], Awaitable[operations.Operation] + [service.UpgradeInstanceInternalRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the upgrade instance internal method over gRPC. @@ -655,7 +662,7 @@ def upgrade_instance_internal( self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstanceInternal", request_serializer=service.UpgradeInstanceInternalRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["upgrade_instance_internal"] @@ -716,7 +723,9 @@ def get_environment( @property def create_environment( self, - ) -> Callable[[service.CreateEnvironmentRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [service.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create environment method over gRPC. Creates a new Environment. @@ -735,14 +744,16 @@ def create_environment( self._stubs["create_environment"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/CreateEnvironment", request_serializer=service.CreateEnvironmentRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_environment"] @property def delete_environment( self, - ) -> Callable[[service.DeleteEnvironmentRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [service.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete environment method over gRPC. Deletes a single Environment. @@ -761,7 +772,7 @@ def delete_environment( self._stubs["delete_environment"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/DeleteEnvironment", request_serializer=service.DeleteEnvironmentRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_environment"] diff --git a/google/cloud/notebooks_v1beta1/types/__init__.py b/google/cloud/notebooks_v1beta1/types/__init__.py index 6b8325a..6982c3d 100644 --- a/google/cloud/notebooks_v1beta1/types/__init__.py +++ b/google/cloud/notebooks_v1beta1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .environment import ( ContainerImage, Environment, diff --git a/google/cloud/notebooks_v1beta1/types/environment.py b/google/cloud/notebooks_v1beta1/types/environment.py index 4e02c7e..5d2aeac 100644 --- a/google/cloud/notebooks_v1beta1/types/environment.py +++ b/google/cloud/notebooks_v1beta1/types/environment.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -55,23 +52,17 @@ class Environment(proto.Message): environment was created. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) vm_image = proto.Field( proto.MESSAGE, number=6, oneof="image_type", message="VmImage", ) - container_image = proto.Field( proto.MESSAGE, number=7, oneof="image_type", message="ContainerImage", ) - - post_startup_script = proto.Field(proto.STRING, number=8) - - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) + post_startup_script = proto.Field(proto.STRING, number=8,) + create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) class VmImage(proto.Message): @@ -90,11 +81,9 @@ class VmImage(proto.Message): the newest image in this family will be used. """ - project = proto.Field(proto.STRING, number=1) - - image_name = proto.Field(proto.STRING, number=2, oneof="image") - - image_family = proto.Field(proto.STRING, number=3, oneof="image") + project = proto.Field(proto.STRING, number=1,) + image_name = proto.Field(proto.STRING, number=2, oneof="image",) + image_family = proto.Field(proto.STRING, number=3, oneof="image",) class ContainerImage(proto.Message): @@ -110,9 +99,8 @@ class ContainerImage(proto.Message): specified, this defaults to the latest tag. """ - repository = proto.Field(proto.STRING, number=1) - - tag = proto.Field(proto.STRING, number=2) + repository = proto.Field(proto.STRING, number=1,) + tag = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1beta1/types/instance.py b/google/cloud/notebooks_v1beta1/types/instance.py index 636825e..3f862be 100644 --- a/google/cloud/notebooks_v1beta1/types/instance.py +++ b/google/cloud/notebooks_v1beta1/types/instance.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.notebooks_v1beta1.types import environment -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -29,7 +26,6 @@ class Instance(proto.Message): r"""The definition of a notebook instance. - Attributes: name (str): Output only. The name of this notebook instance. 
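
# A small constructor sketch for the proto-plus types reformatted above. The
# field names come from the Environment/VmImage definitions in this file; the
# string values are invented for illustration only.
from google.cloud.notebooks_v1beta1.types import environment

env = environment.Environment(
    name="projects/p/locations/us-central1/environments/env-1",
    display_name="example",
    vm_image=environment.VmImage(
        project="example-image-project",
        image_family="example-image-family",
    ),
    post_startup_script="gs://example-bucket/startup.sh",
)
# vm_image and container_image share the "image_type" oneof, so setting one
# implicitly clears the other.
assert env.vm_image.image_family == "example-image-family"
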
Format: @@ -198,71 +194,48 @@ class AcceleratorConfig(proto.Message): """ type_ = proto.Field(proto.ENUM, number=1, enum="Instance.AcceleratorType",) + core_count = proto.Field(proto.INT64, number=2,) - core_count = proto.Field(proto.INT64, number=2) - - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) vm_image = proto.Field( proto.MESSAGE, number=2, oneof="environment", message=environment.VmImage, ) - container_image = proto.Field( proto.MESSAGE, number=3, oneof="environment", message=environment.ContainerImage, ) - - post_startup_script = proto.Field(proto.STRING, number=4) - - proxy_uri = proto.Field(proto.STRING, number=5) - - instance_owners = proto.RepeatedField(proto.STRING, number=6) - - service_account = proto.Field(proto.STRING, number=7) - - machine_type = proto.Field(proto.STRING, number=8) - + post_startup_script = proto.Field(proto.STRING, number=4,) + proxy_uri = proto.Field(proto.STRING, number=5,) + instance_owners = proto.RepeatedField(proto.STRING, number=6,) + service_account = proto.Field(proto.STRING, number=7,) + machine_type = proto.Field(proto.STRING, number=8,) accelerator_config = proto.Field( proto.MESSAGE, number=9, message=AcceleratorConfig, ) - state = proto.Field(proto.ENUM, number=10, enum=State,) - - install_gpu_driver = proto.Field(proto.BOOL, number=11) - - custom_gpu_driver_path = proto.Field(proto.STRING, number=12) - + install_gpu_driver = proto.Field(proto.BOOL, number=11,) + custom_gpu_driver_path = proto.Field(proto.STRING, number=12,) boot_disk_type = proto.Field(proto.ENUM, number=13, enum=DiskType,) - - boot_disk_size_gb = proto.Field(proto.INT64, number=14) - + boot_disk_size_gb = proto.Field(proto.INT64, number=14,) data_disk_type = proto.Field(proto.ENUM, number=25, enum=DiskType,) - - data_disk_size_gb = proto.Field(proto.INT64, number=26) - - no_remove_data_disk = proto.Field(proto.BOOL, number=27) - + data_disk_size_gb = proto.Field(proto.INT64, number=26,) + no_remove_data_disk = proto.Field(proto.BOOL, number=27,) disk_encryption = proto.Field(proto.ENUM, number=15, enum=DiskEncryption,) - - kms_key = proto.Field(proto.STRING, number=16) - - no_public_ip = proto.Field(proto.BOOL, number=17) - - no_proxy_access = proto.Field(proto.BOOL, number=18) - - network = proto.Field(proto.STRING, number=19) - - subnet = proto.Field(proto.STRING, number=20) - - labels = proto.MapField(proto.STRING, proto.STRING, number=21) - - metadata = proto.MapField(proto.STRING, proto.STRING, number=22) - - create_time = proto.Field(proto.MESSAGE, number=23, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=24, message=timestamp.Timestamp,) + kms_key = proto.Field(proto.STRING, number=16,) + no_public_ip = proto.Field(proto.BOOL, number=17,) + no_proxy_access = proto.Field(proto.BOOL, number=18,) + network = proto.Field(proto.STRING, number=19,) + subnet = proto.Field(proto.STRING, number=20,) + labels = proto.MapField(proto.STRING, proto.STRING, number=21,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=22,) + create_time = proto.Field( + proto.MESSAGE, number=23, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1beta1/types/service.py b/google/cloud/notebooks_v1beta1/types/service.py index ddabeea..e25802a 100644 --- a/google/cloud/notebooks_v1beta1/types/service.py +++ 
b/google/cloud/notebooks_v1beta1/types/service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.notebooks_v1beta1.types import environment as gcn_environment from google.cloud.notebooks_v1beta1.types import instance as gcn_instance -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -55,7 +52,6 @@ class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. - Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): The time the operation was created. @@ -81,26 +77,18 @@ class OperationMetadata(proto.Message): API endpoint name of this operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_message = proto.Field(proto.STRING, number=5) - - requested_cancellation = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) - - endpoint = proto.Field(proto.STRING, number=8) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_message = proto.Field(proto.STRING, number=5,) + requested_cancellation = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) + endpoint = proto.Field(proto.STRING, number=8,) class ListInstancesRequest(proto.Message): r"""Request for listing notebook instances. - Attributes: parent (str): Required. Format: @@ -112,16 +100,13 @@ class ListInstancesRequest(proto.Message): used to continue listing from the last result. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListInstancesResponse(proto.Message): r"""Response for listing notebook instances. - Attributes: instances (Sequence[google.cloud.notebooks_v1beta1.types.Instance]): A list of returned instances. @@ -142,27 +127,23 @@ def raw_page(self): instances = proto.RepeatedField( proto.MESSAGE, number=1, message=gcn_instance.Instance, ) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetInstanceRequest(proto.Message): r"""Request for getting a notebook instance. - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateInstanceRequest(proto.Message): r"""Request for creating a notebook instance. - Attributes: parent (str): Required. 
Format: @@ -174,16 +155,13 @@ class CreateInstanceRequest(proto.Message): Required. The instance to be created. """ - parent = proto.Field(proto.STRING, number=1) - - instance_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) instance = proto.Field(proto.MESSAGE, number=3, message=gcn_instance.Instance,) class RegisterInstanceRequest(proto.Message): r"""Request for registering a notebook instance. - Attributes: parent (str): Required. Format: @@ -196,14 +174,12 @@ class RegisterInstanceRequest(proto.Message): character cannot be a dash. """ - parent = proto.Field(proto.STRING, number=1) - - instance_id = proto.Field(proto.STRING, number=2) + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) class SetInstanceAcceleratorRequest(proto.Message): r"""Request for setting instance accelerator. - Attributes: name (str): Required. Format: @@ -218,18 +194,15 @@ class SetInstanceAcceleratorRequest(proto.Message): to find a valid combination. TPUs are not supported. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) type_ = proto.Field( proto.ENUM, number=2, enum=gcn_instance.Instance.AcceleratorType, ) - - core_count = proto.Field(proto.INT64, number=3) + core_count = proto.Field(proto.INT64, number=3,) class SetInstanceMachineTypeRequest(proto.Message): r"""Request for setting instance machine type. - Attributes: name (str): Required. Format: @@ -239,14 +212,12 @@ class SetInstanceMachineTypeRequest(proto.Message): type `__. """ - name = proto.Field(proto.STRING, number=1) - - machine_type = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + machine_type = proto.Field(proto.STRING, number=2,) class SetInstanceLabelsRequest(proto.Message): r"""Request for setting instance labels. - Attributes: name (str): Required. Format: @@ -257,57 +228,52 @@ class SetInstanceLabelsRequest(proto.Message): method """ - name = proto.Field(proto.STRING, number=1) - - labels = proto.MapField(proto.STRING, proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + labels = proto.MapField(proto.STRING, proto.STRING, number=2,) class DeleteInstanceRequest(proto.Message): r"""Request for deleting a notebook instance. - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class StartInstanceRequest(proto.Message): r"""Request for starting a notebook instance - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class StopInstanceRequest(proto.Message): r"""Request for stopping a notebook instance - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ResetInstanceRequest(proto.Message): r"""Request for reseting a notebook instance - Attributes: name (str): Required. 
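
# A brief usage sketch (identifiers invented) for the request messages defined
# in this module: proto-plus lets them be built directly from keyword
# arguments, including nested messages such as the Instance payload.
from google.cloud.notebooks_v1beta1.types import instance as gcn_instance
from google.cloud.notebooks_v1beta1.types import service

request = service.CreateInstanceRequest(
    parent="projects/example-project/locations/us-central1",
    instance_id="example-instance",
    instance=gcn_instance.Instance(machine_type="n1-standard-4"),
)
assert request.instance.machine_type == "n1-standard-4"
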
Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ReportInstanceInfoRequest(proto.Message): @@ -328,28 +294,24 @@ class ReportInstanceInfoRequest(proto.Message): will be merged to the instance metadata store """ - name = proto.Field(proto.STRING, number=1) - - vm_id = proto.Field(proto.STRING, number=2) - - metadata = proto.MapField(proto.STRING, proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + vm_id = proto.Field(proto.STRING, number=2,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=3,) class IsInstanceUpgradeableRequest(proto.Message): r"""Request for checking if a notebook instance is upgradeable. - Attributes: notebook_instance (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - notebook_instance = proto.Field(proto.STRING, number=1) + notebook_instance = proto.Field(proto.STRING, number=1,) class IsInstanceUpgradeableResponse(proto.Message): r"""Response for checking if a notebook instance is upgradeable. - Attributes: upgradeable (bool): If an instance is upgradeable. @@ -361,28 +323,24 @@ class IsInstanceUpgradeableResponse(proto.Message): Additional information about upgrade. """ - upgradeable = proto.Field(proto.BOOL, number=1) - - upgrade_version = proto.Field(proto.STRING, number=2) - - upgrade_info = proto.Field(proto.STRING, number=3) + upgradeable = proto.Field(proto.BOOL, number=1,) + upgrade_version = proto.Field(proto.STRING, number=2,) + upgrade_info = proto.Field(proto.STRING, number=3,) class UpgradeInstanceRequest(proto.Message): r"""Request for upgrading a notebook instance - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/instances/{instance_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpgradeInstanceInternalRequest(proto.Message): r"""Request for upgrading a notebook instance from within the VM - Attributes: name (str): Required. Format: @@ -394,14 +352,12 @@ class UpgradeInstanceInternalRequest(proto.Message): instance-identity """ - name = proto.Field(proto.STRING, number=1) - - vm_id = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + vm_id = proto.Field(proto.STRING, number=2,) class ListEnvironmentsRequest(proto.Message): r"""Request for listing environments. - Attributes: parent (str): Required. Format: @@ -413,16 +369,13 @@ class ListEnvironmentsRequest(proto.Message): used to continue listing from the last result. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListEnvironmentsResponse(proto.Message): r"""Response for listing environments. - Attributes: environments (Sequence[google.cloud.notebooks_v1beta1.types.Environment]): A list of returned environments. 
@@ -441,27 +394,23 @@ def raw_page(self): environments = proto.RepeatedField( proto.MESSAGE, number=1, message=gcn_environment.Environment, ) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetEnvironmentRequest(proto.Message): r"""Request for getting a notebook environment. - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/environments/{environment_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateEnvironmentRequest(proto.Message): r"""Request for creating a notebook environment. - Attributes: parent (str): Required. Format: @@ -476,10 +425,8 @@ class CreateEnvironmentRequest(proto.Message): Required. The environment to be created. """ - parent = proto.Field(proto.STRING, number=1) - - environment_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + environment_id = proto.Field(proto.STRING, number=2,) environment = proto.Field( proto.MESSAGE, number=3, message=gcn_environment.Environment, ) @@ -487,14 +434,13 @@ class CreateEnvironmentRequest(proto.Message): class DeleteEnvironmentRequest(proto.Message): r"""Request for deleting a notebook environment. - Attributes: name (str): Required. Format: ``projects/{project_id}/locations/{location}/environments/{environment_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/notebooks_v1beta1/__init__.py b/tests/unit/gapic/notebooks_v1beta1/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/notebooks_v1beta1/__init__.py +++ b/tests/unit/gapic/notebooks_v1beta1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py index 97a18c7..d09166d 100644 --- a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py +++ b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.notebooks_v1beta1.services.notebook_service import ( NotebookServiceAsyncClient, @@ -43,12 +42,42 @@ ) from google.cloud.notebooks_v1beta1.services.notebook_service import pagers from google.cloud.notebooks_v1beta1.services.notebook_service import transports +from google.cloud.notebooks_v1beta1.services.notebook_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.notebooks_v1beta1.services.notebook_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test 
cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -99,7 +128,7 @@ def test__get_default_mtls_endpoint(): "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,] ) def test_notebook_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -116,7 +145,7 @@ def test_notebook_service_client_from_service_account_info(client_class): "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,] ) def test_notebook_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -169,7 +198,7 @@ def test_notebook_service_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(NotebookServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -467,7 +496,7 @@ def test_list_instances( transport: str = "grpc", request_type=service.ListInstancesRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -480,21 +509,16 @@ def test_list_instances( call.return_value = service.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.ListInstancesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -506,7 +530,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
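
# A compact sketch of the version-gating pattern the markers above implement:
# pytest.mark.skipif plus packaging.version comparisons keep a test from
# running against too-old dependencies. The pinned version string below is a
# stand-in; the real modules read it from the installed packages.
import packaging.version
import pytest

_EXAMPLE_GOOGLE_AUTH_VERSION = "1.24.0"  # illustrative placeholder

example_requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_EXAMPLE_GOOGLE_AUTH_VERSION)
    < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)

@example_requires_google_auth_gte_1_25_0
def test_requires_newer_google_auth():
    assert True  # body is a stand-in; the skip marker is the point
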
client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -514,7 +538,6 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListInstancesRequest() @@ -523,7 +546,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -539,20 +562,16 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) - response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -562,17 +581,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = service.ListInstancesResponse() - client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -587,11 +606,14 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -599,7 +621,6 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( service.ListInstancesResponse() ) - await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -613,7 +634,7 @@ async def test_list_instances_field_headers_async(): def test_list_instances_pager(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -651,7 +672,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -681,7 +702,9 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -718,7 +741,9 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -752,7 +777,7 @@ async def test_list_instances_async_pages(): def test_get_instance(transport: str = "grpc", request_type=service.GetInstanceRequest): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -785,57 +810,34 @@ def test_get_instance(transport: str = "grpc", request_type=service.GetInstanceR subnet="subnet_value", vm_image=environment.VmImage(project="project_value"), ) - response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.GetInstanceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, instance.Instance) - assert response.name == "name_value" - assert response.post_startup_script == "post_startup_script_value" - assert response.proxy_uri == "proxy_uri_value" - assert response.instance_owners == ["instance_owners_value"] - assert response.service_account == "service_account_value" - assert response.machine_type == "machine_type_value" - assert response.state == instance.Instance.State.STARTING - assert response.install_gpu_driver is True - assert response.custom_gpu_driver_path == "custom_gpu_driver_path_value" - assert response.boot_disk_type == instance.Instance.DiskType.PD_STANDARD - assert response.boot_disk_size_gb == 1792 - assert response.data_disk_type == instance.Instance.DiskType.PD_STANDARD - assert response.data_disk_size_gb == 1766 - assert response.no_remove_data_disk is True - assert response.disk_encryption == instance.Instance.DiskEncryption.GMEK - assert response.kms_key == "kms_key_value" - assert response.no_public_ip is True - assert response.no_proxy_access is True - assert response.network == "network_value" - assert response.subnet == "subnet_value" @@ -847,7 +849,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
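
# A short sketch (resource names invented) of the paging behaviour those pager
# tests exercise: iterating the object returned by list_instances transparently
# follows next_page_token across result pages.
from google.cloud.notebooks_v1beta1.services.notebook_service import (
    NotebookServiceClient,
)

def print_all_instances(client: NotebookServiceClient, parent: str) -> None:
    # Each iteration may trigger another ListInstances RPC under the hood.
    for inst in client.list_instances(request={"parent": parent}):
        print(inst.name)
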
client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -855,7 +857,6 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetInstanceRequest() @@ -864,7 +865,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -898,56 +899,34 @@ async def test_get_instance_async( subnet="subnet_value", ) ) - response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, instance.Instance) - assert response.name == "name_value" - assert response.post_startup_script == "post_startup_script_value" - assert response.proxy_uri == "proxy_uri_value" - assert response.instance_owners == ["instance_owners_value"] - assert response.service_account == "service_account_value" - assert response.machine_type == "machine_type_value" - assert response.state == instance.Instance.State.STARTING - assert response.install_gpu_driver is True - assert response.custom_gpu_driver_path == "custom_gpu_driver_path_value" - assert response.boot_disk_type == instance.Instance.DiskType.PD_STANDARD - assert response.boot_disk_size_gb == 1792 - assert response.data_disk_type == instance.Instance.DiskType.PD_STANDARD - assert response.data_disk_size_gb == 1766 - assert response.no_remove_data_disk is True - assert response.disk_encryption == instance.Instance.DiskEncryption.GMEK - assert response.kms_key == "kms_key_value" - assert response.no_public_ip is True - assert response.no_proxy_access is True - assert response.network == "network_value" - assert response.subnet == "subnet_value" @@ -957,17 +936,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = instance.Instance() - client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -982,17 +961,19 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = service.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance()) - await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1009,7 +990,7 @@ def test_create_instance( transport: str = "grpc", request_type=service.CreateInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1020,13 +1001,11 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1041,7 +1020,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1049,7 +1028,6 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateInstanceRequest() @@ -1058,7 +1036,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1071,13 +1049,11 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1090,17 +1066,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1115,11 +1091,14 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1127,7 +1106,6 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1144,7 +1122,7 @@ def test_register_instance( transport: str = "grpc", request_type=service.RegisterInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1157,13 +1135,11 @@ def test_register_instance( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.register_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.RegisterInstanceRequest() # Establish that the response is the type that we expect. @@ -1178,7 +1154,7 @@ def test_register_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1188,7 +1164,6 @@ def test_register_instance_empty_call(): client.register_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.RegisterInstanceRequest() @@ -1197,7 +1172,7 @@ async def test_register_instance_async( transport: str = "grpc_asyncio", request_type=service.RegisterInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1212,13 +1187,11 @@ async def test_register_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.register_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.RegisterInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1231,11 +1204,12 @@ async def test_register_instance_async_from_dict(): def test_register_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.RegisterInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1243,7 +1217,6 @@ def test_register_instance_field_headers(): type(client.transport.register_instance), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.register_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1258,11 +1231,14 @@ def test_register_instance_field_headers(): @pytest.mark.asyncio async def test_register_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.RegisterInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1272,7 +1248,6 @@ async def test_register_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.register_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1289,7 +1264,7 @@ def test_set_instance_accelerator( transport: str = "grpc", request_type=service.SetInstanceAcceleratorRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1302,13 +1277,11 @@ def test_set_instance_accelerator( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.set_instance_accelerator(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceAcceleratorRequest() # Establish that the response is the type that we expect. @@ -1323,7 +1296,7 @@ def test_set_instance_accelerator_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1333,7 +1306,6 @@ def test_set_instance_accelerator_empty_call(): client.set_instance_accelerator() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceAcceleratorRequest() @@ -1342,7 +1314,7 @@ async def test_set_instance_accelerator_async( transport: str = "grpc_asyncio", request_type=service.SetInstanceAcceleratorRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1357,13 +1329,11 @@ async def test_set_instance_accelerator_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.set_instance_accelerator(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceAcceleratorRequest() # Establish that the response is the type that we expect. @@ -1376,11 +1346,12 @@ async def test_set_instance_accelerator_async_from_dict(): def test_set_instance_accelerator_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.SetInstanceAcceleratorRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1388,7 +1359,6 @@ def test_set_instance_accelerator_field_headers(): type(client.transport.set_instance_accelerator), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.set_instance_accelerator(request) # Establish that the underlying gRPC stub method was called. @@ -1403,11 +1373,14 @@ def test_set_instance_accelerator_field_headers(): @pytest.mark.asyncio async def test_set_instance_accelerator_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.SetInstanceAcceleratorRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1417,7 +1390,6 @@ async def test_set_instance_accelerator_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.set_instance_accelerator(request) # Establish that the underlying gRPC stub method was called. @@ -1434,7 +1406,7 @@ def test_set_instance_machine_type( transport: str = "grpc", request_type=service.SetInstanceMachineTypeRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1447,13 +1419,11 @@ def test_set_instance_machine_type( ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.set_instance_machine_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceMachineTypeRequest() # Establish that the response is the type that we expect. @@ -1468,7 +1438,7 @@ def test_set_instance_machine_type_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1478,7 +1448,6 @@ def test_set_instance_machine_type_empty_call(): client.set_instance_machine_type() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceMachineTypeRequest() @@ -1487,7 +1456,7 @@ async def test_set_instance_machine_type_async( transport: str = "grpc_asyncio", request_type=service.SetInstanceMachineTypeRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1502,13 +1471,11 @@ async def test_set_instance_machine_type_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.set_instance_machine_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceMachineTypeRequest() # Establish that the response is the type that we expect. @@ -1521,11 +1488,12 @@ async def test_set_instance_machine_type_async_from_dict(): def test_set_instance_machine_type_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.SetInstanceMachineTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1533,7 +1501,6 @@ def test_set_instance_machine_type_field_headers(): type(client.transport.set_instance_machine_type), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.set_instance_machine_type(request) # Establish that the underlying gRPC stub method was called. @@ -1548,11 +1515,14 @@ def test_set_instance_machine_type_field_headers(): @pytest.mark.asyncio async def test_set_instance_machine_type_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.SetInstanceMachineTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1562,7 +1532,6 @@ async def test_set_instance_machine_type_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.set_instance_machine_type(request) # Establish that the underlying gRPC stub method was called. @@ -1579,7 +1548,7 @@ def test_set_instance_labels( transport: str = "grpc", request_type=service.SetInstanceLabelsRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1592,13 +1561,11 @@ def test_set_instance_labels( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.set_instance_labels(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceLabelsRequest() # Establish that the response is the type that we expect. @@ -1613,7 +1580,7 @@ def test_set_instance_labels_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1623,7 +1590,6 @@ def test_set_instance_labels_empty_call(): client.set_instance_labels() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceLabelsRequest() @@ -1632,7 +1598,7 @@ async def test_set_instance_labels_async( transport: str = "grpc_asyncio", request_type=service.SetInstanceLabelsRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1647,13 +1613,11 @@ async def test_set_instance_labels_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.set_instance_labels(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.SetInstanceLabelsRequest() # Establish that the response is the type that we expect. @@ -1666,11 +1630,12 @@ async def test_set_instance_labels_async_from_dict(): def test_set_instance_labels_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.SetInstanceLabelsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1678,7 +1643,6 @@ def test_set_instance_labels_field_headers(): type(client.transport.set_instance_labels), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.set_instance_labels(request) # Establish that the underlying gRPC stub method was called. 
@@ -1693,11 +1657,14 @@ def test_set_instance_labels_field_headers(): @pytest.mark.asyncio async def test_set_instance_labels_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.SetInstanceLabelsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1707,7 +1674,6 @@ async def test_set_instance_labels_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.set_instance_labels(request) # Establish that the underlying gRPC stub method was called. @@ -1724,7 +1690,7 @@ def test_delete_instance( transport: str = "grpc", request_type=service.DeleteInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1735,13 +1701,11 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1756,7 +1720,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1764,7 +1728,6 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteInstanceRequest() @@ -1773,7 +1736,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1786,13 +1749,11 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1805,17 +1766,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1830,11 +1791,14 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1842,7 +1806,6 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1859,7 +1822,7 @@ def test_start_instance( transport: str = "grpc", request_type=service.StartInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1870,13 +1833,11 @@ def test_start_instance( with mock.patch.object(type(client.transport.start_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.start_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.StartInstanceRequest() # Establish that the response is the type that we expect. @@ -1891,7 +1852,7 @@ def test_start_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1899,7 +1860,6 @@ def test_start_instance_empty_call(): client.start_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.StartInstanceRequest() @@ -1908,7 +1868,7 @@ async def test_start_instance_async( transport: str = "grpc_asyncio", request_type=service.StartInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1921,13 +1881,11 @@ async def test_start_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.start_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.StartInstanceRequest() # Establish that the response is the type that we expect. @@ -1940,17 +1898,17 @@ async def test_start_instance_async_from_dict(): def test_start_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.StartInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.start_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.start_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1965,11 +1923,14 @@ def test_start_instance_field_headers(): @pytest.mark.asyncio async def test_start_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.StartInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1977,7 +1938,6 @@ async def test_start_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.start_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1994,7 +1954,7 @@ def test_stop_instance( transport: str = "grpc", request_type=service.StopInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2005,13 +1965,11 @@ def test_stop_instance( with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.stop_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.StopInstanceRequest() # Establish that the response is the type that we expect. @@ -2026,7 +1984,7 @@ def test_stop_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2034,7 +1992,6 @@ def test_stop_instance_empty_call(): client.stop_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.StopInstanceRequest() @@ -2043,7 +2000,7 @@ async def test_stop_instance_async( transport: str = "grpc_asyncio", request_type=service.StopInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2056,13 +2013,11 @@ async def test_stop_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.StopInstanceRequest() # Establish that the response is the type that we expect. @@ -2075,17 +2030,17 @@ async def test_stop_instance_async_from_dict(): def test_stop_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.StopInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.stop_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2100,11 +2055,14 @@ def test_stop_instance_field_headers(): @pytest.mark.asyncio async def test_stop_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.StopInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2112,7 +2070,6 @@ async def test_stop_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -2129,7 +2086,7 @@ def test_reset_instance( transport: str = "grpc", request_type=service.ResetInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2140,13 +2097,11 @@ def test_reset_instance( with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.reset_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.ResetInstanceRequest() # Establish that the response is the type that we expect. @@ -2161,7 +2116,7 @@ def test_reset_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2169,7 +2124,6 @@ def test_reset_instance_empty_call(): client.reset_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ResetInstanceRequest() @@ -2178,7 +2132,7 @@ async def test_reset_instance_async( transport: str = "grpc_asyncio", request_type=service.ResetInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2191,13 +2145,11 @@ async def test_reset_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.ResetInstanceRequest() # Establish that the response is the type that we expect. @@ -2210,17 +2162,17 @@ async def test_reset_instance_async_from_dict(): def test_reset_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ResetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.reset_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2235,11 +2187,14 @@ def test_reset_instance_field_headers(): @pytest.mark.asyncio async def test_reset_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. request = service.ResetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2247,7 +2202,6 @@ async def test_reset_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2264,7 +2218,7 @@ def test_report_instance_info( transport: str = "grpc", request_type=service.ReportInstanceInfoRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2277,13 +2231,11 @@ def test_report_instance_info( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.report_instance_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.ReportInstanceInfoRequest() # Establish that the response is the type that we expect. @@ -2298,7 +2250,7 @@ def test_report_instance_info_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2308,7 +2260,6 @@ def test_report_instance_info_empty_call(): client.report_instance_info() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ReportInstanceInfoRequest() @@ -2317,7 +2268,7 @@ async def test_report_instance_info_async( transport: str = "grpc_asyncio", request_type=service.ReportInstanceInfoRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2332,13 +2283,11 @@ async def test_report_instance_info_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.report_instance_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.ReportInstanceInfoRequest() # Establish that the response is the type that we expect. @@ -2351,11 +2300,12 @@ async def test_report_instance_info_async_from_dict(): def test_report_instance_info_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ReportInstanceInfoRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2363,7 +2313,6 @@ def test_report_instance_info_field_headers(): type(client.transport.report_instance_info), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.report_instance_info(request) # Establish that the underlying gRPC stub method was called. @@ -2378,11 +2327,14 @@ def test_report_instance_info_field_headers(): @pytest.mark.asyncio async def test_report_instance_info_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ReportInstanceInfoRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2392,7 +2344,6 @@ async def test_report_instance_info_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.report_instance_info(request) # Establish that the underlying gRPC stub method was called. @@ -2409,7 +2360,7 @@ def test_is_instance_upgradeable( transport: str = "grpc", request_type=service.IsInstanceUpgradeableRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2426,23 +2377,17 @@ def test_is_instance_upgradeable( upgrade_version="upgrade_version_value", upgrade_info="upgrade_info_value", ) - response = client.is_instance_upgradeable(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.IsInstanceUpgradeableRequest() # Establish that the response is the type that we expect. - assert isinstance(response, service.IsInstanceUpgradeableResponse) - assert response.upgradeable is True - assert response.upgrade_version == "upgrade_version_value" - assert response.upgrade_info == "upgrade_info_value" @@ -2454,7 +2399,7 @@ def test_is_instance_upgradeable_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2464,7 +2409,6 @@ def test_is_instance_upgradeable_empty_call(): client.is_instance_upgradeable() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.IsInstanceUpgradeableRequest() @@ -2473,7 +2417,7 @@ async def test_is_instance_upgradeable_async( transport: str = "grpc_asyncio", request_type=service.IsInstanceUpgradeableRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2492,22 +2436,17 @@ async def test_is_instance_upgradeable_async( upgrade_info="upgrade_info_value", ) ) - response = await client.is_instance_upgradeable(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.IsInstanceUpgradeableRequest() # Establish that the response is the type that we expect. assert isinstance(response, service.IsInstanceUpgradeableResponse) - assert response.upgradeable is True - assert response.upgrade_version == "upgrade_version_value" - assert response.upgrade_info == "upgrade_info_value" @@ -2517,11 +2456,12 @@ async def test_is_instance_upgradeable_async_from_dict(): def test_is_instance_upgradeable_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.IsInstanceUpgradeableRequest() + request.notebook_instance = "notebook_instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2529,7 +2469,6 @@ def test_is_instance_upgradeable_field_headers(): type(client.transport.is_instance_upgradeable), "__call__" ) as call: call.return_value = service.IsInstanceUpgradeableResponse() - client.is_instance_upgradeable(request) # Establish that the underlying gRPC stub method was called. @@ -2547,11 +2486,14 @@ def test_is_instance_upgradeable_field_headers(): @pytest.mark.asyncio async def test_is_instance_upgradeable_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.IsInstanceUpgradeableRequest() + request.notebook_instance = "notebook_instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2561,7 +2503,6 @@ async def test_is_instance_upgradeable_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( service.IsInstanceUpgradeableResponse() ) - await client.is_instance_upgradeable(request) # Establish that the underlying gRPC stub method was called. @@ -2581,7 +2522,7 @@ def test_upgrade_instance( transport: str = "grpc", request_type=service.UpgradeInstanceRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2592,13 +2533,11 @@ def test_upgrade_instance( with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.UpgradeInstanceRequest() # Establish that the response is the type that we expect. @@ -2613,7 +2552,7 @@ def test_upgrade_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2621,7 +2560,6 @@ def test_upgrade_instance_empty_call(): client.upgrade_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpgradeInstanceRequest() @@ -2630,7 +2568,7 @@ async def test_upgrade_instance_async( transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2643,13 +2581,11 @@ async def test_upgrade_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.UpgradeInstanceRequest() # Establish that the response is the type that we expect. @@ -2662,17 +2598,17 @@ async def test_upgrade_instance_async_from_dict(): def test_upgrade_instance_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.UpgradeInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2687,11 +2623,14 @@ def test_upgrade_instance_field_headers(): @pytest.mark.asyncio async def test_upgrade_instance_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.UpgradeInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2699,7 +2638,6 @@ async def test_upgrade_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2716,7 +2654,7 @@ def test_upgrade_instance_internal( transport: str = "grpc", request_type=service.UpgradeInstanceInternalRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2729,13 +2667,11 @@ def test_upgrade_instance_internal( ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.upgrade_instance_internal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.UpgradeInstanceInternalRequest() # Establish that the response is the type that we expect. @@ -2750,7 +2686,7 @@ def test_upgrade_instance_internal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2760,7 +2696,6 @@ def test_upgrade_instance_internal_empty_call(): client.upgrade_instance_internal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpgradeInstanceInternalRequest() @@ -2769,7 +2704,7 @@ async def test_upgrade_instance_internal_async( transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceInternalRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2784,13 +2719,11 @@ async def test_upgrade_instance_internal_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.upgrade_instance_internal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.UpgradeInstanceInternalRequest() # Establish that the response is the type that we expect. @@ -2803,11 +2736,12 @@ async def test_upgrade_instance_internal_async_from_dict(): def test_upgrade_instance_internal_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.UpgradeInstanceInternalRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2815,7 +2749,6 @@ def test_upgrade_instance_internal_field_headers(): type(client.transport.upgrade_instance_internal), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.upgrade_instance_internal(request) # Establish that the underlying gRPC stub method was called. @@ -2830,11 +2763,14 @@ def test_upgrade_instance_internal_field_headers(): @pytest.mark.asyncio async def test_upgrade_instance_internal_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.UpgradeInstanceInternalRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2844,7 +2780,6 @@ async def test_upgrade_instance_internal_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.upgrade_instance_internal(request) # Establish that the underlying gRPC stub method was called. @@ -2861,7 +2796,7 @@ def test_list_environments( transport: str = "grpc", request_type=service.ListEnvironmentsRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2876,21 +2811,16 @@ def test_list_environments( call.return_value = service.ListEnvironmentsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_environments(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.ListEnvironmentsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEnvironmentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -2902,7 +2832,7 @@ def test_list_environments_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2912,7 +2842,6 @@ def test_list_environments_empty_call(): client.list_environments() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListEnvironmentsRequest() @@ -2921,7 +2850,7 @@ async def test_list_environments_async( transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2939,20 +2868,16 @@ async def test_list_environments_async( unreachable=["unreachable_value"], ) ) - response = await client.list_environments(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.ListEnvironmentsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEnvironmentsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -2962,11 +2887,12 @@ async def test_list_environments_async_from_dict(): def test_list_environments_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ListEnvironmentsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2974,7 +2900,6 @@ def test_list_environments_field_headers(): type(client.transport.list_environments), "__call__" ) as call: call.return_value = service.ListEnvironmentsResponse() - client.list_environments(request) # Establish that the underlying gRPC stub method was called. @@ -2989,11 +2914,14 @@ def test_list_environments_field_headers(): @pytest.mark.asyncio async def test_list_environments_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.ListEnvironmentsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3003,7 +2931,6 @@ async def test_list_environments_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( service.ListEnvironmentsResponse() ) - await client.list_environments(request) # Establish that the underlying gRPC stub method was called. @@ -3017,7 +2944,7 @@ async def test_list_environments_field_headers_async(): def test_list_environments_pager(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3057,7 +2984,7 @@ def test_list_environments_pager(): def test_list_environments_pages(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3089,7 +3016,9 @@ def test_list_environments_pages(): @pytest.mark.asyncio async def test_list_environments_async_pager(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3128,7 +3057,9 @@ async def test_list_environments_async_pager(): @pytest.mark.asyncio async def test_list_environments_async_pages(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3166,7 +3097,7 @@ def test_get_environment( transport: str = "grpc", request_type=service.GetEnvironmentRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3183,25 +3114,18 @@ def test_get_environment( post_startup_script="post_startup_script_value", vm_image=environment.VmImage(project="project_value"), ) - response = client.get_environment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.GetEnvironmentRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, environment.Environment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.post_startup_script == "post_startup_script_value" @@ -3213,7 +3137,7 @@ def test_get_environment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3221,7 +3145,6 @@ def test_get_environment_empty_call(): client.get_environment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetEnvironmentRequest() @@ -3230,7 +3153,7 @@ async def test_get_environment_async( transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3248,24 +3171,18 @@ async def test_get_environment_async( post_startup_script="post_startup_script_value", ) ) - response = await client.get_environment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.GetEnvironmentRequest() # Establish that the response is the type that we expect. assert isinstance(response, environment.Environment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.post_startup_script == "post_startup_script_value" @@ -3275,17 +3192,17 @@ async def test_get_environment_async_from_dict(): def test_get_environment_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.GetEnvironmentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_environment), "__call__") as call: call.return_value = environment.Environment() - client.get_environment(request) # Establish that the underlying gRPC stub method was called. @@ -3300,11 +3217,14 @@ def test_get_environment_field_headers(): @pytest.mark.asyncio async def test_get_environment_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.GetEnvironmentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3312,7 +3232,6 @@ async def test_get_environment_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( environment.Environment() ) - await client.get_environment(request) # Establish that the underlying gRPC stub method was called. @@ -3329,7 +3248,7 @@ def test_create_environment( transport: str = "grpc", request_type=service.CreateEnvironmentRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3342,13 +3261,11 @@ def test_create_environment( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_environment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateEnvironmentRequest() # Establish that the response is the type that we expect. @@ -3363,7 +3280,7 @@ def test_create_environment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3373,7 +3290,6 @@ def test_create_environment_empty_call(): client.create_environment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateEnvironmentRequest() @@ -3382,7 +3298,7 @@ async def test_create_environment_async( transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3397,13 +3313,11 @@ async def test_create_environment_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_environment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateEnvironmentRequest() # Establish that the response is the type that we expect. @@ -3416,11 +3330,12 @@ async def test_create_environment_async_from_dict(): def test_create_environment_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.CreateEnvironmentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3428,7 +3343,6 @@ def test_create_environment_field_headers(): type(client.transport.create_environment), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_environment(request) # Establish that the underlying gRPC stub method was called. 
@@ -3443,11 +3357,14 @@ def test_create_environment_field_headers(): @pytest.mark.asyncio async def test_create_environment_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.CreateEnvironmentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3457,7 +3374,6 @@ async def test_create_environment_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_environment(request) # Establish that the underlying gRPC stub method was called. @@ -3474,7 +3390,7 @@ def test_delete_environment( transport: str = "grpc", request_type=service.DeleteEnvironmentRequest ): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3487,13 +3403,11 @@ def test_delete_environment( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_environment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteEnvironmentRequest() # Establish that the response is the type that we expect. @@ -3508,7 +3422,7 @@ def test_delete_environment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3518,7 +3432,6 @@ def test_delete_environment_empty_call(): client.delete_environment() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteEnvironmentRequest() @@ -3527,7 +3440,7 @@ async def test_delete_environment_async( transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest ): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3542,13 +3455,11 @@ async def test_delete_environment_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteEnvironmentRequest() # Establish that the response is the type that we expect. 
@@ -3561,11 +3472,12 @@ async def test_delete_environment_async_from_dict(): def test_delete_environment_field_headers(): - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.DeleteEnvironmentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3573,7 +3485,6 @@ def test_delete_environment_field_headers(): type(client.transport.delete_environment), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_environment(request) # Establish that the underlying gRPC stub method was called. @@ -3588,11 +3499,14 @@ def test_delete_environment_field_headers(): @pytest.mark.asyncio async def test_delete_environment_field_headers_async(): - client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = service.DeleteEnvironmentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3602,7 +3516,6 @@ async def test_delete_environment_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. @@ -3618,16 +3531,16 @@ async def test_delete_environment_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.NotebookServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.NotebookServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = NotebookServiceClient( @@ -3637,7 +3550,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.NotebookServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = NotebookServiceClient( @@ -3648,7 +3561,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.NotebookServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = NotebookServiceClient(transport=transport) assert client.transport is transport @@ -3657,13 +3570,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.NotebookServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.NotebookServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -3678,23 +3591,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),) + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.NotebookServiceGrpcTransport,) def test_notebook_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.NotebookServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -3706,7 +3619,7 @@ def test_notebook_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.NotebookServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -3742,15 +3655,37 @@ def test_notebook_service_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_notebook_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotebookServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_notebook_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.NotebookServiceTransport( 
credentials_file="credentials.json", quota_project_id="octopus", ) @@ -3763,19 +3698,33 @@ def test_notebook_service_base_transport_with_credentials_file(): def test_notebook_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.NotebookServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_notebook_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NotebookServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_notebook_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) NotebookServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -3783,20 +3732,156 @@ def test_notebook_service_auth_adc(): ) -def test_notebook_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_notebook_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.NotebookServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_notebook_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.NotebookServiceGrpcTransport, grpc_helpers), + (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_notebook_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "notebooks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="notebooks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.NotebookServiceGrpcTransport, grpc_helpers), + (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_notebook_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "notebooks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.NotebookServiceGrpcTransport, grpc_helpers), + (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_notebook_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "notebooks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -3805,7 +3890,7 @@ def test_notebook_service_transport_auth_adc(): ], ) def test_notebook_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -3844,7 +3929,7 @@ def test_notebook_service_grpc_transport_client_cert_source_for_mtls(transport_c def test_notebook_service_host_no_port(): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="notebooks.googleapis.com" ), @@ -3854,7 +3939,7 @@ def test_notebook_service_host_no_port(): def test_notebook_service_host_with_port(): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="notebooks.googleapis.com:8000" ), @@ -3910,9 +3995,9 @@ def test_notebook_service_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -3988,7 +4073,7 @@ def test_notebook_service_transport_channel_mtls_with_adc(transport_class): def test_notebook_service_grpc_lro_client(): client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -4001,7 +4086,7 @@ def test_notebook_service_grpc_lro_client(): def test_notebook_service_grpc_lro_async_client(): client = NotebookServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -4015,7 +4100,6 @@ def test_notebook_service_grpc_lro_async_client(): def test_environment_path(): project = "squid" environment = "clam" - expected = "projects/{project}/environments/{environment}".format( project=project, environment=environment, ) @@ -4038,7 +4122,6 @@ def test_parse_environment_path(): def test_instance_path(): project = "oyster" instance = "nudibranch" - expected = "projects/{project}/instances/{instance}".format( project=project, instance=instance, ) @@ -4060,7 +4143,6 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "winkle" - 
expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4081,7 +4163,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "scallop" - expected = "folders/{folder}".format(folder=folder,) actual = NotebookServiceClient.common_folder_path(folder) assert expected == actual @@ -4100,7 +4181,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "squid" - expected = "organizations/{organization}".format(organization=organization,) actual = NotebookServiceClient.common_organization_path(organization) assert expected == actual @@ -4119,7 +4199,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "whelk" - expected = "projects/{project}".format(project=project,) actual = NotebookServiceClient.common_project_path(project) assert expected == actual @@ -4139,7 +4218,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "oyster" location = "nudibranch" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -4166,7 +4244,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.NotebookServiceTransport, "_prep_wrapped_messages" ) as prep: client = NotebookServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -4175,6 +4253,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = NotebookServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From 9790dc9da532ec396a8d81e3946da53cf243c066 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 14 May 2021 19:12:04 -0600 Subject: [PATCH 13/18] fix(deps): add packaging requirement (#45) Add packaging requirement. 
packaging.version is used for a version comparison in transports/base.py and is needed after the upgrade to gapic-generator-python 0.46.3 --- setup.py | 1 + testing/constraints-3.6.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/setup.py b/setup.py index d36d61d..7468506 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ install_requires=( "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.1.0", + "packaging >= 14.3", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 2de4f17..4fc6a86 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -7,3 +7,4 @@ # Then this file should have foo==1.14.0 google-api-core==1.22.2 proto-plus==1.1.0 +packaging==14.3 From bd17fb7bb636594e5beb968af7bbc76e84bda2de Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 16 May 2021 14:04:05 +0000 Subject: [PATCH 14/18] chore: new owl bot post processor docker image (#46) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa --- .github/.OwlBot.lock.yaml | 5 ++--- .pre-commit-config.yaml | 2 +- CONTRIBUTING.rst | 16 +--------------- noxfile.py | 14 ++------------ 4 files changed, 6 insertions(+), 31 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 29084e8..864c176 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8912e9b..4f00c7c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.2 hooks: - id: flake8 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 595c4ac..080d4e5 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. 
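Connecting the packaging requirement above to the ``@requires_google_auth_gte_1_25_0`` / ``@requires_google_auth_lt_1_25_0`` markers used throughout the regenerated transport tests: version gates of that kind are normally built from ``packaging.version`` comparisons. A plausible sketch is shown below; the real definitions live near the top of the generated test module and in ``transports/base.py``, outside the hunks in this patch::

    import google.auth
    import packaging.version
    import pytest

    # Assumed helper: the installed google-auth version. The generated code may
    # obtain this differently (e.g. via pkg_resources).
    _GOOGLE_AUTH_VERSION = getattr(google.auth, "__version__", "0.0.0")

    requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
        packaging.version.parse(_GOOGLE_AUTH_VERSION)
        < packaging.version.parse("1.25.0"),
        reason="requires google-auth >= 1.25.0",
    )
    requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
        packaging.version.parse(_GOOGLE_AUTH_VERSION)
        >= packaging.version.parse("1.25.0"),
        reason="requires google-auth < 1.25.0",
    )

google-auth 1.25.0 introduced the ``default_scopes`` argument, which is why the two test variants above assert different ``adc.assert_called_once_with`` signatures.
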
************* Test Coverage diff --git a/noxfile.py b/noxfile.py index 4d37cd3..70417e8 100644 --- a/noxfile.py +++ b/noxfile.py @@ -62,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -131,9 +124,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") From 01c3898f97e1179c150fbbb7336e31bcc6fcabea Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 09:20:09 +0000 Subject: [PATCH 15/18] chore: new owl bot post processor docker image (#47) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 --- .github/.OwlBot.lock.yaml | 2 +- noxfile.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 864c176..46e3f02 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa + digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 diff --git a/noxfile.py b/noxfile.py index 70417e8..03aa2f5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -179,7 +179,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -201,7 +201,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( From 12ba32aec375b5cf9073ea6202b8d55a7fb4744a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 16:06:11 +0000 Subject: [PATCH 16/18] chore: new owl bot post processor docker image (#48) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf --- .github/.OwlBot.lock.yaml | 2 +- docs/multiprocessing.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 46e3f02..127c2cd 
100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 + digest: sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst index 1cb29d4..536d17b 100644 --- a/docs/multiprocessing.rst +++ b/docs/multiprocessing.rst @@ -1,7 +1,7 @@ .. note:: - Because this client uses :mod:`grpcio` library, it is safe to + Because this client uses :mod:`grpc` library, it is safe to share instances across threads. In multiprocessing scenarios, the best practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.Pool` or + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or :class:`multiprocessing.Process`. From bd49b04c060598bfc86e48e349a2981daeb6202d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 17:00:13 +0000 Subject: [PATCH 17/18] chore: new owl bot post processor docker image (#49) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 127c2cd..da616c9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf + digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 diff --git a/docs/conf.py b/docs/conf.py index b072fff..3957e0b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -363,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } From 7043302c29a89b39e180f4d359384b8c0d24422d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Jun 2021 11:26:02 +0000 Subject: [PATCH 18/18] chore: release 0.2.0 (#28) :robot: I have created a release \*beep\* \*boop\* --- ## [0.2.0](https://www.github.com/googleapis/python-notebooks/compare/v0.1.2...v0.2.0) (2021-05-28) ### Features * add `from_service_account_info` ([#26](https://www.github.com/googleapis/python-notebooks/issues/26)) ([4999922](https://www.github.com/googleapis/python-notebooks/commit/4999922dc0f6eaebc8aec58929176ab6b87cfdca)) * support self-signed JWT flow for service accounts ([7a84b3b](https://www.github.com/googleapis/python-notebooks/commit/7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd)) ### Bug Fixes * add async client to %name_%version/init.py ([7a84b3b](https://www.github.com/googleapis/python-notebooks/commit/7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd)) * **deps:** add packaging requirement ([#45](https://www.github.com/googleapis/python-notebooks/issues/45)) ([9790dc9](https://www.github.com/googleapis/python-notebooks/commit/9790dc9da532ec396a8d81e3946da53cf243c066)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
See [documentation](https://github.com/googleapis/release-please#release-please). --- CHANGELOG.md | 14 ++++++++++++++ setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6503e4..abcabd4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.2.0](https://www.github.com/googleapis/python-notebooks/compare/v0.1.2...v0.2.0) (2021-05-28) + + +### Features + +* add `from_service_account_info` ([#26](https://www.github.com/googleapis/python-notebooks/issues/26)) ([4999922](https://www.github.com/googleapis/python-notebooks/commit/4999922dc0f6eaebc8aec58929176ab6b87cfdca)) +* support self-signed JWT flow for service accounts ([7a84b3b](https://www.github.com/googleapis/python-notebooks/commit/7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([7a84b3b](https://www.github.com/googleapis/python-notebooks/commit/7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd)) +* **deps:** add packaging requirement ([#45](https://www.github.com/googleapis/python-notebooks/issues/45)) ([9790dc9](https://www.github.com/googleapis/python-notebooks/commit/9790dc9da532ec396a8d81e3946da53cf243c066)) + ### [0.1.2](https://www.github.com/googleapis/python-notebooks/compare/v0.1.1...v0.1.2) (2021-02-08) diff --git a/setup.py b/setup.py index 7468506..6a2c301 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "0.1.2" +version = "0.2.0" package_root = os.path.abspath(os.path.dirname(__file__))
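
As a quick illustration of the ``from_service_account_info`` constructor called out in the 0.2.0 notes above, client construction from an in-memory key dict looks roughly like this (the key path is a placeholder; a real service-account key is required for the call to succeed)::

    import json

    from google.cloud.notebooks_v1beta1.services.notebook_service import (
        NotebookServiceClient,
    )

    # Placeholder path: substitute a real downloaded service-account key.
    with open("service-account.json") as fh:
        info = json.load(fh)

    client = NotebookServiceClient.from_service_account_info(info)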