diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..73a7e26
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,18 @@
+[run]
+branch = True
+
+[report]
+fail_under = 100
+show_missing = True
+omit =
+ google/cloud/notebooks/__init__.py
+exclude_lines =
+ # Re-enable the standard pragma
+ pragma: NO COVER
+ # Ignore debug-only repr
+ def __repr__
+ # Ignore pkg_resources exceptions.
+ # This is added at the module level as a safeguard for if someone
+ # generates the code and tries to run it without pip installing. This
+ # makes it virtually impossible to test properly.
+ except pkg_resources.DistributionNotFound
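The `exclude_lines` patterns above drop matching lines, and the blocks they introduce, from the coverage report, which is what keeps `fail_under = 100` realistic. A minimal sketch of the kind of code these exclusions target (the names below are illustrative, not taken from this library):

```python
# Illustrative only: shows lines the .coveragerc above would exclude.
import pkg_resources

try:
    __version__ = pkg_resources.get_distribution("google-cloud-notebooks").version
except pkg_resources.DistributionNotFound:  # excluded: safeguard for non-pip installs
    __version__ = "0.0.0"


class Widget:
    def __init__(self, name: str) -> None:
        self.name = name

    def __repr__(self):  # excluded: debug-only repr
        return f"Widget({self.name!r})"

    def rarely_hit(self):  # pragma: NO COVER
        # Excluded explicitly with the standard pragma.
        raise NotImplementedError
```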
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 0000000..da616c9
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,3 @@
+docker:
+ image: gcr.io/repo-automation-bots/owlbot-python:latest
+ digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
new file mode 100644
index 0000000..282df92
--- /dev/null
+++ b/.github/.OwlBot.yaml
@@ -0,0 +1,26 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+ image: gcr.io/repo-automation-bots/owlbot-python:latest
+
+deep-remove-regex:
+ - /owl-bot-staging
+
+deep-copy-regex:
+ - source: /google/cloud/notebooks/(v.*)/.*-py/(.*)
+ dest: /owl-bot-staging/$1/$2
+
+begin-after-commit-hash: b06c9034cfcbce180ba732d03be6526e5c8ea1bc
+
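The `deep-copy-regex` entry copies generated code into `owl-bot-staging`, with `$1`/`$2` referring to the two capture groups of the source pattern. A small illustration of the rewrite using Python's `re` module (where backreferences are written `\1`/`\2`); the input path is made up, roughly following the googleapis-gen layout:

```python
# Illustrative sketch of the path rewrite OwlBot performs internally.
import re

source = r"/google/cloud/notebooks/(v.*)/.*-py/(.*)"
dest = r"/owl-bot-staging/\1/\2"

path = "/google/cloud/notebooks/v1beta1/notebooks-v1beta1-py/google/cloud/notebooks_v1beta1/__init__.py"
print(re.sub(source, dest, path))
# /owl-bot-staging/v1beta1/google/cloud/notebooks_v1beta1/__init__.py
```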
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000..6fe78aa
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52..b4243ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 762e3c9..ab9b6e2 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
set -eo pipefail
-cd github/python-notebooks
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-notebooks"
+fi
+
+cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3.6 -m nox -s "${NOX_SESSION:-}"
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3.6 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 1118107..bad28ff 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
value: "false"
}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-notebooks/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 292da3e..c5ce41e 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools
export PYTHONUNBUFFERED=1
# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
+TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
cd github/python-notebooks
python3 setup.py sdist bdist_wheel
-twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
+twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index c07503d..9d9cb3b 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,18 +23,8 @@ env_vars: {
value: "github/python-notebooks/.kokoro/release.sh"
}
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
# Tokens needed to report release status back to GitHub
env_vars: {
key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
\ No newline at end of file
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+}
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 0000000..f5e0618
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-notebooks
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 0000000..cf5de74
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. `./samples` not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index f05fb0a..1273a87 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
# `-e` enables the script to automatically fail when a command fails
# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
@@ -24,87 +28,19 @@ cd github/python-notebooks
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+  # Preserve the test runner implementation before rewinding.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
LATEST_RELEASE=$(git describe --abbrev=0 --tags)
git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
- echo "No tests run. `./samples` not found"
- exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+  # Restore the test runner implementation if the checkout removed it.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
fi
+fi
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
+exec .kokoro/test-samples-impl.sh
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a9024b1..4f00c7c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,3 +1,17 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
@@ -12,6 +26,6 @@ repos:
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.8.4
+ rev: 3.9.2
hooks:
- id: flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index dfdbcd2..4817b29 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -6,6 +6,7 @@
"issue_tracker": "",
"release_level": "beta",
"language": "python",
+ "library_type": "GAPIC_AUTO",
"repo": "googleapis/python-notebooks",
"distribution_name": "google-cloud-notebooks",
"api_id": "notebooks.googleapis.com"
diff --git a/.trampolinerc b/.trampolinerc
index 995ee29..383b6ec 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
pass_down_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
+ "NOX_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b6503e4..abcabd4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
# Changelog
+## [0.2.0](https://www.github.com/googleapis/python-notebooks/compare/v0.1.2...v0.2.0) (2021-05-28)
+
+
+### Features
+
+* add `from_service_account_info` ([#26](https://www.github.com/googleapis/python-notebooks/issues/26)) ([4999922](https://www.github.com/googleapis/python-notebooks/commit/4999922dc0f6eaebc8aec58929176ab6b87cfdca))
+* support self-signed JWT flow for service accounts ([7a84b3b](https://www.github.com/googleapis/python-notebooks/commit/7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd))
+
+
+### Bug Fixes
+
+* add async client to %name_%version/init.py ([7a84b3b](https://www.github.com/googleapis/python-notebooks/commit/7a84b3b9b8c206a0dc33ccc09821ffa8ee8c3ddd))
+* **deps:** add packaging requirement ([#45](https://www.github.com/googleapis/python-notebooks/issues/45)) ([9790dc9](https://www.github.com/googleapis/python-notebooks/commit/9790dc9da532ec396a8d81e3946da53cf243c066))
+
### [0.1.2](https://www.github.com/googleapis/python-notebooks/compare/v0.1.1...v0.1.2) (2021-02-08)
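The 0.2.0 entry above adds `from_service_account_info`, which builds a client from an already-parsed credentials dict rather than a file path. A hedged usage sketch; the key file name below is a placeholder:

```python
# Sketch of the new from_service_account_info alongside the existing
# from_service_account_file; "service-account.json" is a placeholder path.
import json

from google.cloud import notebooks

# From a key file on disk (existing behaviour).
client = notebooks.NotebookServiceClient.from_service_account_file(
    "service-account.json"
)

# From an already-parsed dict (new in 0.2.0).
with open("service-account.json") as fh:
    info = json.load(fh)
client = notebooks.NotebookServiceClient.from_service_account_info(info)
```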
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index a37c975..080d4e5 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+   $ nox -s unit-3.8 -- -k <name of test>
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -133,34 +142,25 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+   $ nox -s system-3.8 -- -k <name of test>
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
auth settings and change some configuration in your project to
run all the tests.
-- System tests will be run against an actual project and
- so you'll need to provide some environment variables to facilitate
- authentication to your project:
-
- - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
- Such a file can be downloaded directly from the developer's console by clicking
- "Generate new JSON key". See private key
- `docs `__
- for more details.
-
-- Once you have downloaded your json keys, set the environment variable
- ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file::
-
- $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json"
-
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__.
*************
Test Coverage
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d1..e783f4c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..8b58ae9
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and coordinate disclosure here on GitHub using a Security Advisory to privately discuss and fix the issue.
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index bcd37bb..b0a2954 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,9 +1,20 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
+}
/* Ensure minimum width for 'Parameters' / 'Returns' column */
dl.field-list > dt {
min-width: 100px
}
+
+/* Insert space between methods for readability */
+dl.method {
+ padding-top: 10px;
+ padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+ padding-bottom: 50px
+}
diff --git a/docs/conf.py b/docs/conf.py
index c79c15d..3957e0b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,17 @@
# -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
# google-cloud-notebooks documentation build configuration file
#
@@ -350,6 +363,7 @@
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+ "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
index 1cb29d4..536d17b 100644
--- a/docs/multiprocessing.rst
+++ b/docs/multiprocessing.rst
@@ -1,7 +1,7 @@
.. note::
- Because this client uses :mod:`grpcio` library, it is safe to
+  Because this client uses the :mod:`grpc` library, it is safe to
share instances across threads. In multiprocessing scenarios, the best
practice is to create client instances *after* the invocation of
- :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
:class:`multiprocessing.Process`.
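A sketch of the pattern this note recommends, assuming a plain environment with default credentials: the gRPC-backed client is constructed inside each worker process, after the fork, never in the parent:

```python
# Sketch: one client per worker process, created by the pool initializer.
import multiprocessing

from google.cloud import notebooks_v1beta1

_client = None  # populated after the fork, inside each worker


def _init_worker():
    global _client
    _client = notebooks_v1beta1.NotebookServiceClient()


def _get_instance(name):
    return _client.get_instance(request={"name": name})


if __name__ == "__main__":
    # Placeholder: fill with full resource names, e.g.
    # "projects/{project}/locations/{location}/instances/{instance_id}".
    names = []
    with multiprocessing.Pool(processes=4, initializer=_init_worker) as pool:
        print(pool.map(_get_instance, names))
```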
diff --git a/docs/notebooks_v1beta1/notebook_service.rst b/docs/notebooks_v1beta1/notebook_service.rst
new file mode 100644
index 0000000..79a8fea
--- /dev/null
+++ b/docs/notebooks_v1beta1/notebook_service.rst
@@ -0,0 +1,10 @@
+NotebookService
+---------------------------------
+
+.. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service
+ :members:
+ :inherited-members:
+
+.. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/notebooks_v1beta1/services.rst b/docs/notebooks_v1beta1/services.rst
index a88bad5..33b3547 100644
--- a/docs/notebooks_v1beta1/services.rst
+++ b/docs/notebooks_v1beta1/services.rst
@@ -1,6 +1,6 @@
Services for Google Cloud Notebooks v1beta1 API
===============================================
+.. toctree::
+ :maxdepth: 2
-.. automodule:: google.cloud.notebooks_v1beta1.services.notebook_service
- :members:
- :inherited-members:
+ notebook_service
diff --git a/docs/notebooks_v1beta1/types.rst b/docs/notebooks_v1beta1/types.rst
index b981f31..7cdeaf4 100644
--- a/docs/notebooks_v1beta1/types.rst
+++ b/docs/notebooks_v1beta1/types.rst
@@ -3,4 +3,5 @@ Types for Google Cloud Notebooks v1beta1 API
.. automodule:: google.cloud.notebooks_v1beta1.types
:members:
+ :undoc-members:
:show-inheritance:
diff --git a/google/cloud/notebooks/__init__.py b/google/cloud/notebooks/__init__.py
index 70533b4..2d3bf90 100644
--- a/google/cloud/notebooks/__init__.py
+++ b/google/cloud/notebooks/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,12 +14,13 @@
# limitations under the License.
#
-from google.cloud.notebooks_v1beta1.services.notebook_service.async_client import (
- NotebookServiceAsyncClient,
-)
from google.cloud.notebooks_v1beta1.services.notebook_service.client import (
NotebookServiceClient,
)
+from google.cloud.notebooks_v1beta1.services.notebook_service.async_client import (
+ NotebookServiceAsyncClient,
+)
+
from google.cloud.notebooks_v1beta1.types.environment import ContainerImage
from google.cloud.notebooks_v1beta1.types.environment import Environment
from google.cloud.notebooks_v1beta1.types.environment import VmImage
@@ -50,23 +50,24 @@
from google.cloud.notebooks_v1beta1.types.service import UpgradeInstanceRequest
__all__ = (
+ "NotebookServiceClient",
+ "NotebookServiceAsyncClient",
"ContainerImage",
+ "Environment",
+ "VmImage",
+ "Instance",
"CreateEnvironmentRequest",
"CreateInstanceRequest",
"DeleteEnvironmentRequest",
"DeleteInstanceRequest",
- "Environment",
"GetEnvironmentRequest",
"GetInstanceRequest",
- "Instance",
"IsInstanceUpgradeableRequest",
"IsInstanceUpgradeableResponse",
"ListEnvironmentsRequest",
"ListEnvironmentsResponse",
"ListInstancesRequest",
"ListInstancesResponse",
- "NotebookServiceAsyncClient",
- "NotebookServiceClient",
"OperationMetadata",
"RegisterInstanceRequest",
"ReportInstanceInfoRequest",
@@ -78,5 +79,4 @@
"StopInstanceRequest",
"UpgradeInstanceInternalRequest",
"UpgradeInstanceRequest",
- "VmImage",
)
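The change above only regroups `__all__`; the versionless `google.cloud.notebooks` namespace and the versioned `google.cloud.notebooks_v1beta1` package still re-export the same objects, as this quick check (assuming both `__init__` modules as shown in this diff) illustrates:

```python
# Both import surfaces expose the same class objects; no API calls involved.
from google.cloud import notebooks
from google.cloud import notebooks_v1beta1

assert notebooks.NotebookServiceClient is notebooks_v1beta1.NotebookServiceClient
assert notebooks.NotebookServiceAsyncClient is notebooks_v1beta1.NotebookServiceAsyncClient
assert notebooks.Instance is notebooks_v1beta1.Instance
```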
diff --git a/google/cloud/notebooks_v1beta1/__init__.py b/google/cloud/notebooks_v1beta1/__init__.py
index 707961a..af872cb 100644
--- a/google/cloud/notebooks_v1beta1/__init__.py
+++ b/google/cloud/notebooks_v1beta1/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,8 @@
#
from .services.notebook_service import NotebookServiceClient
+from .services.notebook_service import NotebookServiceAsyncClient
+
from .types.environment import ContainerImage
from .types.environment import Environment
from .types.environment import VmImage
@@ -44,8 +45,8 @@
from .types.service import UpgradeInstanceInternalRequest
from .types.service import UpgradeInstanceRequest
-
__all__ = (
+ "NotebookServiceAsyncClient",
"ContainerImage",
"CreateEnvironmentRequest",
"CreateInstanceRequest",
@@ -61,6 +62,7 @@
"ListEnvironmentsResponse",
"ListInstancesRequest",
"ListInstancesResponse",
+ "NotebookServiceClient",
"OperationMetadata",
"RegisterInstanceRequest",
"ReportInstanceInfoRequest",
@@ -73,5 +75,4 @@
"UpgradeInstanceInternalRequest",
"UpgradeInstanceRequest",
"VmImage",
- "NotebookServiceClient",
)
diff --git a/google/cloud/notebooks_v1beta1/gapic_metadata.json b/google/cloud/notebooks_v1beta1/gapic_metadata.json
new file mode 100644
index 0000000..b435e9c
--- /dev/null
+++ b/google/cloud/notebooks_v1beta1/gapic_metadata.json
@@ -0,0 +1,213 @@
+ {
+ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+ "language": "python",
+ "libraryPackage": "google.cloud.notebooks_v1beta1",
+ "protoPackage": "google.cloud.notebooks.v1beta1",
+ "schema": "1.0",
+ "services": {
+ "NotebookService": {
+ "clients": {
+ "grpc": {
+ "libraryClient": "NotebookServiceClient",
+ "rpcs": {
+ "CreateEnvironment": {
+ "methods": [
+ "create_environment"
+ ]
+ },
+ "CreateInstance": {
+ "methods": [
+ "create_instance"
+ ]
+ },
+ "DeleteEnvironment": {
+ "methods": [
+ "delete_environment"
+ ]
+ },
+ "DeleteInstance": {
+ "methods": [
+ "delete_instance"
+ ]
+ },
+ "GetEnvironment": {
+ "methods": [
+ "get_environment"
+ ]
+ },
+ "GetInstance": {
+ "methods": [
+ "get_instance"
+ ]
+ },
+ "IsInstanceUpgradeable": {
+ "methods": [
+ "is_instance_upgradeable"
+ ]
+ },
+ "ListEnvironments": {
+ "methods": [
+ "list_environments"
+ ]
+ },
+ "ListInstances": {
+ "methods": [
+ "list_instances"
+ ]
+ },
+ "RegisterInstance": {
+ "methods": [
+ "register_instance"
+ ]
+ },
+ "ReportInstanceInfo": {
+ "methods": [
+ "report_instance_info"
+ ]
+ },
+ "ResetInstance": {
+ "methods": [
+ "reset_instance"
+ ]
+ },
+ "SetInstanceAccelerator": {
+ "methods": [
+ "set_instance_accelerator"
+ ]
+ },
+ "SetInstanceLabels": {
+ "methods": [
+ "set_instance_labels"
+ ]
+ },
+ "SetInstanceMachineType": {
+ "methods": [
+ "set_instance_machine_type"
+ ]
+ },
+ "StartInstance": {
+ "methods": [
+ "start_instance"
+ ]
+ },
+ "StopInstance": {
+ "methods": [
+ "stop_instance"
+ ]
+ },
+ "UpgradeInstance": {
+ "methods": [
+ "upgrade_instance"
+ ]
+ },
+ "UpgradeInstanceInternal": {
+ "methods": [
+ "upgrade_instance_internal"
+ ]
+ }
+ }
+ },
+ "grpc-async": {
+ "libraryClient": "NotebookServiceAsyncClient",
+ "rpcs": {
+ "CreateEnvironment": {
+ "methods": [
+ "create_environment"
+ ]
+ },
+ "CreateInstance": {
+ "methods": [
+ "create_instance"
+ ]
+ },
+ "DeleteEnvironment": {
+ "methods": [
+ "delete_environment"
+ ]
+ },
+ "DeleteInstance": {
+ "methods": [
+ "delete_instance"
+ ]
+ },
+ "GetEnvironment": {
+ "methods": [
+ "get_environment"
+ ]
+ },
+ "GetInstance": {
+ "methods": [
+ "get_instance"
+ ]
+ },
+ "IsInstanceUpgradeable": {
+ "methods": [
+ "is_instance_upgradeable"
+ ]
+ },
+ "ListEnvironments": {
+ "methods": [
+ "list_environments"
+ ]
+ },
+ "ListInstances": {
+ "methods": [
+ "list_instances"
+ ]
+ },
+ "RegisterInstance": {
+ "methods": [
+ "register_instance"
+ ]
+ },
+ "ReportInstanceInfo": {
+ "methods": [
+ "report_instance_info"
+ ]
+ },
+ "ResetInstance": {
+ "methods": [
+ "reset_instance"
+ ]
+ },
+ "SetInstanceAccelerator": {
+ "methods": [
+ "set_instance_accelerator"
+ ]
+ },
+ "SetInstanceLabels": {
+ "methods": [
+ "set_instance_labels"
+ ]
+ },
+ "SetInstanceMachineType": {
+ "methods": [
+ "set_instance_machine_type"
+ ]
+ },
+ "StartInstance": {
+ "methods": [
+ "start_instance"
+ ]
+ },
+ "StopInstance": {
+ "methods": [
+ "stop_instance"
+ ]
+ },
+ "UpgradeInstance": {
+ "methods": [
+ "upgrade_instance"
+ ]
+ },
+ "UpgradeInstanceInternal": {
+ "methods": [
+ "upgrade_instance_internal"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
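`gapic_metadata.json` maps each proto RPC to the generated client method name per transport. A hedged sketch of how a tool might use it to resolve an RPC name; the lookup logic and the relative file path are illustrative, not part of the library:

```python
# Sketch: resolve an RPC name to the generated method name via the metadata.
import json

with open("gapic_metadata.json") as fh:  # assumed to be run from the package directory
    metadata = json.load(fh)

grpc_rpcs = metadata["services"]["NotebookService"]["clients"]["grpc"]["rpcs"]
async_rpcs = metadata["services"]["NotebookService"]["clients"]["grpc-async"]["rpcs"]

# "ListInstances" -> "list_instances" on NotebookServiceClient, and the same
# method name on NotebookServiceAsyncClient.
print(grpc_rpcs["ListInstances"]["methods"][0])
print(async_rpcs["ListInstances"]["methods"][0])
```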
diff --git a/google/cloud/notebooks_v1beta1/services/__init__.py b/google/cloud/notebooks_v1beta1/services/__init__.py
index 42ffdf2..4de6597 100644
--- a/google/cloud/notebooks_v1beta1/services/__init__.py
+++ b/google/cloud/notebooks_v1beta1/services/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py b/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py
index 92c4e29..a17402b 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from .client import NotebookServiceClient
from .async_client import NotebookServiceAsyncClient
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py
index 99fafe3..18b6efd 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
import functools
import re
@@ -22,10 +20,10 @@
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
-from google.api_core import exceptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api_core import operation # type: ignore
@@ -34,9 +32,8 @@
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
-from google.protobuf import empty_pb2 as empty # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport
from .client import NotebookServiceClient
@@ -54,42 +51,67 @@ class NotebookServiceAsyncClient:
parse_environment_path = staticmethod(NotebookServiceClient.parse_environment_path)
instance_path = staticmethod(NotebookServiceClient.instance_path)
parse_instance_path = staticmethod(NotebookServiceClient.parse_instance_path)
-
common_billing_account_path = staticmethod(
NotebookServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
NotebookServiceClient.parse_common_billing_account_path
)
-
common_folder_path = staticmethod(NotebookServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
NotebookServiceClient.parse_common_folder_path
)
-
common_organization_path = staticmethod(
NotebookServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
NotebookServiceClient.parse_common_organization_path
)
-
common_project_path = staticmethod(NotebookServiceClient.common_project_path)
parse_common_project_path = staticmethod(
NotebookServiceClient.parse_common_project_path
)
-
common_location_path = staticmethod(NotebookServiceClient.common_location_path)
parse_common_location_path = staticmethod(
NotebookServiceClient.parse_common_location_path
)
- from_service_account_file = NotebookServiceClient.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ NotebookServiceAsyncClient: The constructed client.
+ """
+ return NotebookServiceClient.from_service_account_info.__func__(NotebookServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ NotebookServiceAsyncClient: The constructed client.
+ """
+ return NotebookServiceClient.from_service_account_file.__func__(NotebookServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
def transport(self) -> NotebookServiceTransport:
- """Return the transport used by the client instance.
+ """Returns the transport used by the client instance.
Returns:
NotebookServiceTransport: The transport used by the client instance.
@@ -103,12 +125,12 @@ def transport(self) -> NotebookServiceTransport:
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
transport: Union[str, NotebookServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiate the notebook service client.
+ """Instantiates the notebook service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -140,7 +162,6 @@ def __init__(
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
-
self._client = NotebookServiceClient(
credentials=credentials,
transport=transport,
@@ -159,10 +180,9 @@ async def list_instances(
r"""Lists instances in a given project and location.
Args:
- request (:class:`~.service.ListInstancesRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.ListInstancesRequest`):
The request object. Request for listing notebook
instances.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -170,7 +190,7 @@ async def list_instances(
sent along with the request as metadata.
Returns:
- ~.pagers.ListInstancesAsyncPager:
+ google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListInstancesAsyncPager:
Response for listing notebook
instances.
Iterating over this object will yield
@@ -179,7 +199,6 @@ async def list_instances(
"""
# Create or coerce a protobuf request object.
-
request = service.ListInstancesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -219,10 +238,9 @@ async def get_instance(
r"""Gets details of a single Instance.
Args:
- request (:class:`~.service.GetInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.GetInstanceRequest`):
The request object. Request for getting a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -230,13 +248,12 @@ async def get_instance(
sent along with the request as metadata.
Returns:
- ~.instance.Instance:
+ google.cloud.notebooks_v1beta1.types.Instance:
The definition of a notebook
instance.
"""
# Create or coerce a protobuf request object.
-
request = service.GetInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -271,10 +288,9 @@ async def create_instance(
location.
Args:
- request (:class:`~.service.CreateInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.CreateInstanceRequest`):
The request object. Request for creating a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -282,16 +298,15 @@ async def create_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.CreateInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -338,10 +353,9 @@ async def register_instance(
API.
Args:
- request (:class:`~.service.RegisterInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.RegisterInstanceRequest`):
The request object. Request for registering a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -349,16 +363,15 @@ async def register_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.RegisterInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -400,10 +413,9 @@ async def set_instance_accelerator(
r"""Updates the guest accelerators of a single Instance.
Args:
- request (:class:`~.service.SetInstanceAcceleratorRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceAcceleratorRequest`):
The request object. Request for setting instance
accelerator.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -411,16 +423,15 @@ async def set_instance_accelerator(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.SetInstanceAcceleratorRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -462,10 +473,9 @@ async def set_instance_machine_type(
r"""Updates the machine type of a single Instance.
Args:
- request (:class:`~.service.SetInstanceMachineTypeRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceMachineTypeRequest`):
The request object. Request for setting instance machine
type.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -473,16 +483,15 @@ async def set_instance_machine_type(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.SetInstanceMachineTypeRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -524,9 +533,8 @@ async def set_instance_labels(
r"""Updates the labels of an Instance.
Args:
- request (:class:`~.service.SetInstanceLabelsRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest`):
The request object. Request for setting instance labels.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -534,16 +542,15 @@ async def set_instance_labels(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.SetInstanceLabelsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -585,10 +592,9 @@ async def delete_instance(
r"""Deletes a single Instance.
Args:
- request (:class:`~.service.DeleteInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.DeleteInstanceRequest`):
The request object. Request for deleting a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -596,28 +602,25 @@ async def delete_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.empty.Empty``: A generic empty message that
- you can re-use to avoid defining duplicated empty
- messages in your APIs. A typical example is to use it as
- the request or the response type of an API method. For
- instance:
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
- ::
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
- service Foo {
- rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- }
+ }
- The JSON representation for ``Empty`` is empty JSON
- object ``{}``.
+ The JSON representation for Empty is empty JSON
+ object {}.
"""
# Create or coerce a protobuf request object.
-
request = service.DeleteInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -641,7 +644,7 @@ async def delete_instance(
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
- empty.Empty,
+ empty_pb2.Empty,
metadata_type=service.OperationMetadata,
)
@@ -659,10 +662,9 @@ async def start_instance(
r"""Starts a notebook instance.
Args:
- request (:class:`~.service.StartInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.StartInstanceRequest`):
The request object. Request for starting a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -670,16 +672,15 @@ async def start_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.StartInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -721,10 +722,9 @@ async def stop_instance(
r"""Stops a notebook instance.
Args:
- request (:class:`~.service.StopInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.StopInstanceRequest`):
The request object. Request for stopping a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -732,16 +732,15 @@ async def stop_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.StopInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -783,10 +782,9 @@ async def reset_instance(
r"""Resets a notebook instance.
Args:
- request (:class:`~.service.ResetInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.ResetInstanceRequest`):
The request object. Request for reseting a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -794,16 +792,15 @@ async def reset_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.ResetInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -849,10 +846,9 @@ async def report_instance_info(
this method directly.
Args:
- request (:class:`~.service.ReportInstanceInfoRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest`):
The request object. Request for notebook instances to
report information to Notebooks API.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -860,16 +856,15 @@ async def report_instance_info(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.ReportInstanceInfoRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -911,10 +906,9 @@ async def is_instance_upgradeable(
r"""Check if a notebook instance is upgradable.
Args:
- request (:class:`~.service.IsInstanceUpgradeableRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest`):
The request object. Request for checking if a notebook
instance is upgradeable.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -922,13 +916,12 @@ async def is_instance_upgradeable(
sent along with the request as metadata.
Returns:
- ~.service.IsInstanceUpgradeableResponse:
+ google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableResponse:
Response for checking if a notebook
instance is upgradeable.
"""
# Create or coerce a protobuf request object.
-
request = service.IsInstanceUpgradeableRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -964,10 +957,9 @@ async def upgrade_instance(
r"""Upgrades a notebook instance to the latest version.
Args:
- request (:class:`~.service.UpgradeInstanceRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest`):
The request object. Request for upgrading a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -975,16 +967,15 @@ async def upgrade_instance(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.UpgradeInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1028,10 +1019,9 @@ async def upgrade_instance_internal(
this method directly.
Args:
- request (:class:`~.service.UpgradeInstanceInternalRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest`):
The request object. Request for upgrading a notebook
instance from within the VM
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1039,16 +1029,15 @@ async def upgrade_instance_internal(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
request = service.UpgradeInstanceInternalRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1090,9 +1079,8 @@ async def list_environments(
r"""Lists environments in a project.
Args:
- request (:class:`~.service.ListEnvironmentsRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest`):
The request object. Request for listing environments.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1100,7 +1088,7 @@ async def list_environments(
sent along with the request as metadata.
Returns:
- ~.pagers.ListEnvironmentsAsyncPager:
+ google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListEnvironmentsAsyncPager:
Response for listing environments.
Iterating over this object will yield
results and resolve additional pages
@@ -1108,7 +1096,6 @@ async def list_environments(
"""
# Create or coerce a protobuf request object.
-
request = service.ListEnvironmentsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1148,10 +1135,9 @@ async def get_environment(
r"""Gets details of a single Environment.
Args:
- request (:class:`~.service.GetEnvironmentRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.GetEnvironmentRequest`):
The request object. Request for getting a notebook
environment.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1159,14 +1145,13 @@ async def get_environment(
sent along with the request as metadata.
Returns:
- ~.environment.Environment:
+ google.cloud.notebooks_v1beta1.types.Environment:
Definition of a software environment
that is used to start a notebook
instance.
"""
# Create or coerce a protobuf request object.
-
request = service.GetEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1200,10 +1185,9 @@ async def create_environment(
r"""Creates a new Environment.
Args:
- request (:class:`~.service.CreateEnvironmentRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.CreateEnvironmentRequest`):
The request object. Request for creating a notebook
environment.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1211,17 +1195,14 @@ async def create_environment(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.environment.Environment``: Definition of a
- software environment that is used to start a notebook
- instance.
+ The result type for the operation will be :class:`google.cloud.notebooks_v1beta1.types.Environment` Definition of a software environment that is used to start a notebook
+ instance.
"""
# Create or coerce a protobuf request object.
-
request = service.CreateEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1263,10 +1244,9 @@ async def delete_environment(
r"""Deletes a single Environment.
Args:
- request (:class:`~.service.DeleteEnvironmentRequest`):
+ request (:class:`google.cloud.notebooks_v1beta1.types.DeleteEnvironmentRequest`):
The request object. Request for deleting a notebook
environment.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1274,28 +1254,25 @@ async def delete_environment(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.empty.Empty``: A generic empty message that
- you can re-use to avoid defining duplicated empty
- messages in your APIs. A typical example is to use it as
- the request or the response type of an API method. For
- instance:
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
- ::
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
- service Foo {
- rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- }
+ }
- The JSON representation for ``Empty`` is empty JSON
- object ``{}``.
+ The JSON representation for Empty is empty JSON
+ object {}.
"""
# Create or coerce a protobuf request object.
-
request = service.DeleteEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
@@ -1319,7 +1296,7 @@ async def delete_environment(
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
- empty.Empty,
+ empty_pb2.Empty,
metadata_type=service.OperationMetadata,
)
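# For reference, a minimal usage sketch (not part of the patch) of the async surface
# documented above: upgrade_instance returns a google.api_core.operation_async.AsyncOperation
# whose result resolves to a types.Instance. The resource name below is a placeholder
# and Application Default Credentials are assumed.
import asyncio

from google.cloud import notebooks_v1beta1


async def upgrade(name: str) -> None:
    client = notebooks_v1beta1.NotebookServiceAsyncClient()
    operation = await client.upgrade_instance(
        request=notebooks_v1beta1.UpgradeInstanceRequest(name=name)
    )
    instance = await operation.result()  # waits (asynchronously) for the LRO to finish
    print(instance.state)


# asyncio.run(upgrade("projects/my-project/instances/my-instance"))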
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py
index 059c910..3ab6f94 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
from distutils import util
import os
@@ -23,10 +21,10 @@
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
-from google.api_core import exceptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
@@ -38,9 +36,8 @@
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
-from google.protobuf import empty_pb2 as empty # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import NotebookServiceGrpcTransport
from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport
@@ -61,7 +58,7 @@ class NotebookServiceClientMeta(type):
_transport_registry["grpc_asyncio"] = NotebookServiceGrpcAsyncIOTransport
def get_transport_class(cls, label: str = None,) -> Type[NotebookServiceTransport]:
- """Return an appropriate transport class.
+ """Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
@@ -84,7 +81,8 @@ class NotebookServiceClient(metaclass=NotebookServiceClientMeta):
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
- """Convert api endpoint to mTLS endpoint.
+ """Converts api endpoint to mTLS endpoint.
+
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
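# Illustrative sketch only: the conversion performed by the private helper above,
# matching the behavior described in its docstring.
from google.cloud import notebooks_v1beta1

assert (
    notebooks_v1beta1.NotebookServiceClient._get_default_mtls_endpoint(
        "notebooks.googleapis.com"
    )
    == "notebooks.mtls.googleapis.com"
)
assert (
    notebooks_v1beta1.NotebookServiceClient._get_default_mtls_endpoint(
        "notebooks.sandbox.googleapis.com"
    )
    == "notebooks.mtls.sandbox.googleapis.com"
)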
@@ -116,10 +114,27 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ NotebookServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
- file.
+ file.
Args:
filename (str): The path to the service account private key json
@@ -128,7 +143,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ NotebookServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
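# A minimal sketch (not part of the patch) of the new from_service_account_info
# constructor added above; the key-file path is a placeholder and `info` is the
# parsed JSON of a service account key.
import json

from google.cloud import notebooks_v1beta1

with open("service-account.json", "r") as fp:  # placeholder path
    info = json.load(fp)

client = notebooks_v1beta1.NotebookServiceClient.from_service_account_info(info)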
@@ -138,23 +153,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
@property
def transport(self) -> NotebookServiceTransport:
- """Return the transport used by the client instance.
+ """Returns the transport used by the client instance.
Returns:
- NotebookServiceTransport: The transport used by the client instance.
+ NotebookServiceTransport: The transport used by the client
+ instance.
"""
return self._transport
@staticmethod
def environment_path(project: str, environment: str,) -> str:
- """Return a fully-qualified environment string."""
+ """Returns a fully-qualified environment string."""
return "projects/{project}/environments/{environment}".format(
project=project, environment=environment,
)
@staticmethod
def parse_environment_path(path: str) -> Dict[str, str]:
- """Parse a environment path into its component segments."""
+ """Parses a environment path into its component segments."""
m = re.match(
r"^projects/(?P.+?)/environments/(?P.+?)$", path
)
@@ -162,20 +178,20 @@ def parse_environment_path(path: str) -> Dict[str, str]:
@staticmethod
def instance_path(project: str, instance: str,) -> str:
- """Return a fully-qualified instance string."""
+ """Returns a fully-qualified instance string."""
return "projects/{project}/instances/{instance}".format(
project=project, instance=instance,
)
@staticmethod
def parse_instance_path(path: str) -> Dict[str, str]:
- """Parse a instance path into its component segments."""
+ """Parses a instance path into its component segments."""
m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
- """Return a fully-qualified billing_account string."""
+ """Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@@ -188,7 +204,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
@staticmethod
def common_folder_path(folder: str,) -> str:
- """Return a fully-qualified folder string."""
+ """Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
@@ -199,7 +215,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
@staticmethod
def common_organization_path(organization: str,) -> str:
- """Return a fully-qualified organization string."""
+ """Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
@@ -210,7 +226,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
@staticmethod
def common_project_path(project: str,) -> str:
- """Return a fully-qualified project string."""
+ """Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
@@ -221,7 +237,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
@staticmethod
def common_location_path(project: str, location: str,) -> str:
- """Return a fully-qualified location string."""
+ """Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@@ -235,12 +251,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
def __init__(
self,
*,
- credentials: Optional[credentials.Credentials] = None,
+ credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, NotebookServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiate the notebook service client.
+ """Instantiates the notebook service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -248,10 +264,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.NotebookServiceTransport]): The
+ transport (Union[str, NotebookServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -287,21 +303,18 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -313,12 +326,14 @@ def __init__(
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
- )
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
)
# Save or instantiate the transport.
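# Illustrative sketch of the environment variables consulted by the endpoint
# selection above: GOOGLE_API_USE_CLIENT_CERTIFICATE toggles client certificates,
# and GOOGLE_API_USE_MTLS_ENDPOINT ("never" / "auto" / "always") chooses between
# the two class-level endpoints.
import os

from google.cloud import notebooks_v1beta1

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"  # "true" enables mTLS certs
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"        # or "never" / "always"

print(notebooks_v1beta1.NotebookServiceClient.DEFAULT_ENDPOINT)       # notebooks.googleapis.com
print(notebooks_v1beta1.NotebookServiceClient.DEFAULT_MTLS_ENDPOINT)  # notebooks.mtls.googleapis.com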
@@ -333,8 +348,8 @@ def __init__(
)
if client_options.scopes:
raise ValueError(
- "When providing a transport instance, "
- "provide its scopes directly."
+ "When providing a transport instance, provide its scopes "
+ "directly."
)
self._transport = transport
else:
@@ -344,7 +359,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -360,10 +375,9 @@ def list_instances(
r"""Lists instances in a given project and location.
Args:
- request (:class:`~.service.ListInstancesRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest):
The request object. Request for listing notebook
instances.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -371,7 +385,7 @@ def list_instances(
sent along with the request as metadata.
Returns:
- ~.pagers.ListInstancesPager:
+ google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListInstancesPager:
Response for listing notebook
instances.
Iterating over this object will yield
@@ -380,7 +394,6 @@ def list_instances(
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.ListInstancesRequest.
# There's no risk of modifying the input as we've already verified
@@ -421,10 +434,9 @@ def get_instance(
r"""Gets details of a single Instance.
Args:
- request (:class:`~.service.GetInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.GetInstanceRequest):
The request object. Request for getting a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -432,13 +444,12 @@ def get_instance(
sent along with the request as metadata.
Returns:
- ~.instance.Instance:
+ google.cloud.notebooks_v1beta1.types.Instance:
The definition of a notebook
instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.GetInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -474,10 +485,9 @@ def create_instance(
location.
Args:
- request (:class:`~.service.CreateInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.CreateInstanceRequest):
The request object. Request for creating a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -485,16 +495,15 @@ def create_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.CreateInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -542,10 +551,9 @@ def register_instance(
API.
Args:
- request (:class:`~.service.RegisterInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.RegisterInstanceRequest):
The request object. Request for registering a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -553,16 +561,15 @@ def register_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.RegisterInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -605,10 +612,9 @@ def set_instance_accelerator(
r"""Updates the guest accelerators of a single Instance.
Args:
- request (:class:`~.service.SetInstanceAcceleratorRequest`):
+ request (google.cloud.notebooks_v1beta1.types.SetInstanceAcceleratorRequest):
The request object. Request for setting instance
accelerator.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -616,16 +622,15 @@ def set_instance_accelerator(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.SetInstanceAcceleratorRequest.
# There's no risk of modifying the input as we've already verified
@@ -668,10 +673,9 @@ def set_instance_machine_type(
r"""Updates the machine type of a single Instance.
Args:
- request (:class:`~.service.SetInstanceMachineTypeRequest`):
+ request (google.cloud.notebooks_v1beta1.types.SetInstanceMachineTypeRequest):
The request object. Request for setting instance machine
type.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -679,16 +683,15 @@ def set_instance_machine_type(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.SetInstanceMachineTypeRequest.
# There's no risk of modifying the input as we've already verified
@@ -733,9 +736,8 @@ def set_instance_labels(
r"""Updates the labels of an Instance.
Args:
- request (:class:`~.service.SetInstanceLabelsRequest`):
+ request (google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest):
The request object. Request for setting instance labels.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -743,16 +745,15 @@ def set_instance_labels(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.SetInstanceLabelsRequest.
# There's no risk of modifying the input as we've already verified
@@ -795,10 +796,9 @@ def delete_instance(
r"""Deletes a single Instance.
Args:
- request (:class:`~.service.DeleteInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.DeleteInstanceRequest):
The request object. Request for deleting a notebook
instance.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -806,28 +806,25 @@ def delete_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.empty.Empty``: A generic empty message that
- you can re-use to avoid defining duplicated empty
- messages in your APIs. A typical example is to use it as
- the request or the response type of an API method. For
- instance:
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
- ::
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
- service Foo {
- rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- }
+ }
- The JSON representation for ``Empty`` is empty JSON
- object ``{}``.
+ The JSON representation for Empty is empty JSON
+ object {}.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.DeleteInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -852,7 +849,7 @@ def delete_instance(
response = operation.from_gapic(
response,
self._transport.operations_client,
- empty.Empty,
+ empty_pb2.Empty,
metadata_type=service.OperationMetadata,
)
@@ -870,10 +867,9 @@ def start_instance(
r"""Starts a notebook instance.
Args:
- request (:class:`~.service.StartInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.StartInstanceRequest):
The request object. Request for starting a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -881,16 +877,15 @@ def start_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.StartInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -933,10 +928,9 @@ def stop_instance(
r"""Stops a notebook instance.
Args:
- request (:class:`~.service.StopInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.StopInstanceRequest):
The request object. Request for stopping a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -944,16 +938,15 @@ def stop_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.StopInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -996,10 +989,9 @@ def reset_instance(
r"""Resets a notebook instance.
Args:
- request (:class:`~.service.ResetInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ResetInstanceRequest):
                The request object. Request for resetting a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1007,16 +999,15 @@ def reset_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.ResetInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -1063,10 +1054,9 @@ def report_instance_info(
this method directly.
Args:
- request (:class:`~.service.ReportInstanceInfoRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest):
The request object. Request for notebook instances to
report information to Notebooks API.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1074,16 +1064,15 @@ def report_instance_info(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.ReportInstanceInfoRequest.
# There's no risk of modifying the input as we've already verified
@@ -1126,10 +1115,9 @@ def is_instance_upgradeable(
r"""Check if a notebook instance is upgradable.
Args:
- request (:class:`~.service.IsInstanceUpgradeableRequest`):
+ request (google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest):
The request object. Request for checking if a notebook
instance is upgradeable.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1137,13 +1125,12 @@ def is_instance_upgradeable(
sent along with the request as metadata.
Returns:
- ~.service.IsInstanceUpgradeableResponse:
+ google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableResponse:
Response for checking if a notebook
instance is upgradeable.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.IsInstanceUpgradeableRequest.
# There's no risk of modifying the input as we've already verified
@@ -1180,10 +1167,9 @@ def upgrade_instance(
r"""Upgrades a notebook instance to the latest version.
Args:
- request (:class:`~.service.UpgradeInstanceRequest`):
+ request (google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest):
The request object. Request for upgrading a notebook
instance
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1191,16 +1177,15 @@ def upgrade_instance(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.UpgradeInstanceRequest.
# There's no risk of modifying the input as we've already verified
@@ -1245,10 +1230,9 @@ def upgrade_instance_internal(
this method directly.
Args:
- request (:class:`~.service.UpgradeInstanceInternalRequest`):
+ request (google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest):
The request object. Request for upgrading a notebook
instance from within the VM
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1256,16 +1240,15 @@ def upgrade_instance_internal(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.instance.Instance``: The definition of a
- notebook instance.
+ :class:`google.cloud.notebooks_v1beta1.types.Instance`
+ The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.UpgradeInstanceInternalRequest.
# There's no risk of modifying the input as we've already verified
@@ -1310,9 +1293,8 @@ def list_environments(
r"""Lists environments in a project.
Args:
- request (:class:`~.service.ListEnvironmentsRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest):
The request object. Request for listing environments.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1320,7 +1302,7 @@ def list_environments(
sent along with the request as metadata.
Returns:
- ~.pagers.ListEnvironmentsPager:
+ google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListEnvironmentsPager:
Response for listing environments.
Iterating over this object will yield
results and resolve additional pages
@@ -1328,7 +1310,6 @@ def list_environments(
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.ListEnvironmentsRequest.
# There's no risk of modifying the input as we've already verified
@@ -1369,10 +1350,9 @@ def get_environment(
r"""Gets details of a single Environment.
Args:
- request (:class:`~.service.GetEnvironmentRequest`):
+ request (google.cloud.notebooks_v1beta1.types.GetEnvironmentRequest):
The request object. Request for getting a notebook
environment.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1380,14 +1360,13 @@ def get_environment(
sent along with the request as metadata.
Returns:
- ~.environment.Environment:
+ google.cloud.notebooks_v1beta1.types.Environment:
Definition of a software environment
that is used to start a notebook
instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.GetEnvironmentRequest.
# There's no risk of modifying the input as we've already verified
@@ -1422,10 +1401,9 @@ def create_environment(
r"""Creates a new Environment.
Args:
- request (:class:`~.service.CreateEnvironmentRequest`):
+ request (google.cloud.notebooks_v1beta1.types.CreateEnvironmentRequest):
The request object. Request for creating a notebook
environment.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1433,17 +1411,14 @@ def create_environment(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.environment.Environment``: Definition of a
- software environment that is used to start a notebook
- instance.
+ The result type for the operation will be :class:`google.cloud.notebooks_v1beta1.types.Environment` Definition of a software environment that is used to start a notebook
+ instance.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.CreateEnvironmentRequest.
# There's no risk of modifying the input as we've already verified
@@ -1486,10 +1461,9 @@ def delete_environment(
r"""Deletes a single Environment.
Args:
- request (:class:`~.service.DeleteEnvironmentRequest`):
+ request (google.cloud.notebooks_v1beta1.types.DeleteEnvironmentRequest):
The request object. Request for deleting a notebook
environment.
-
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -1497,28 +1471,25 @@ def delete_environment(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.empty.Empty``: A generic empty message that
- you can re-use to avoid defining duplicated empty
- messages in your APIs. A typical example is to use it as
- the request or the response type of an API method. For
- instance:
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
- ::
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
- service Foo {
- rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- }
+ }
- The JSON representation for ``Empty`` is empty JSON
- object ``{}``.
+ The JSON representation for Empty is empty JSON
+ object {}.
"""
# Create or coerce a protobuf request object.
-
# Minor optimization to avoid making a copy if the user passes
# in a service.DeleteEnvironmentRequest.
# There's no risk of modifying the input as we've already verified
@@ -1543,7 +1514,7 @@ def delete_environment(
response = operation.from_gapic(
response,
self._transport.operations_client,
- empty.Empty,
+ empty_pb2.Empty,
metadata_type=service.OperationMetadata,
)
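# A minimal sketch (not part of the patch) of consuming the long-running operations
# returned by the sync client above; the resource name is a placeholder and
# Application Default Credentials are assumed.
from google.cloud import notebooks_v1beta1

client = notebooks_v1beta1.NotebookServiceClient()
operation = client.delete_instance(
    request=notebooks_v1beta1.DeleteInstanceRequest(
        name="projects/my-project/instances/my-instance"  # placeholder
    )
)
# The operation resolves to google.protobuf.empty_pb2.Empty, so result() is used
# only to block until the delete finishes (it raises on failure).
operation.result(timeout=600)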
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py b/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py
index 15c54f6..d918331 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,8 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
@@ -26,7 +33,7 @@ class ListInstancesPager:
"""A pager for iterating through ``list_instances`` requests.
This class thinly wraps an initial
- :class:`~.service.ListInstancesResponse` object, and
+ :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse` object, and
provides an ``__iter__`` method to iterate through its
``instances`` field.
@@ -35,7 +42,7 @@ class ListInstancesPager:
through the ``instances`` field on the
corresponding responses.
- All the usual :class:`~.service.ListInstancesResponse`
+ All the usual :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -53,9 +60,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.service.ListInstancesRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest):
The initial request object.
- response (:class:`~.service.ListInstancesResponse`):
+ response (google.cloud.notebooks_v1beta1.types.ListInstancesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -88,7 +95,7 @@ class ListInstancesAsyncPager:
"""A pager for iterating through ``list_instances`` requests.
This class thinly wraps an initial
- :class:`~.service.ListInstancesResponse` object, and
+ :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``instances`` field.
@@ -97,7 +104,7 @@ class ListInstancesAsyncPager:
through the ``instances`` field on the
corresponding responses.
- All the usual :class:`~.service.ListInstancesResponse`
+ All the usual :class:`google.cloud.notebooks_v1beta1.types.ListInstancesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -110,14 +117,14 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.service.ListInstancesRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ListInstancesRequest):
The initial request object.
- response (:class:`~.service.ListInstancesResponse`):
+ response (google.cloud.notebooks_v1beta1.types.ListInstancesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -154,7 +161,7 @@ class ListEnvironmentsPager:
"""A pager for iterating through ``list_environments`` requests.
This class thinly wraps an initial
- :class:`~.service.ListEnvironmentsResponse` object, and
+ :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse` object, and
provides an ``__iter__`` method to iterate through its
``environments`` field.
@@ -163,7 +170,7 @@ class ListEnvironmentsPager:
through the ``environments`` field on the
corresponding responses.
- All the usual :class:`~.service.ListEnvironmentsResponse`
+ All the usual :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -181,9 +188,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.service.ListEnvironmentsRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest):
The initial request object.
- response (:class:`~.service.ListEnvironmentsResponse`):
+ response (google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -216,7 +223,7 @@ class ListEnvironmentsAsyncPager:
"""A pager for iterating through ``list_environments`` requests.
This class thinly wraps an initial
- :class:`~.service.ListEnvironmentsResponse` object, and
+ :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``environments`` field.
@@ -225,7 +232,7 @@ class ListEnvironmentsAsyncPager:
through the ``environments`` field on the
corresponding responses.
- All the usual :class:`~.service.ListEnvironmentsResponse`
+ All the usual :class:`google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -238,14 +245,14 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
+ """Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.service.ListEnvironmentsRequest`):
+ request (google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest):
The initial request object.
- response (:class:`~.service.ListEnvironmentsResponse`):
+ response (google.cloud.notebooks_v1beta1.types.ListEnvironmentsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
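# A minimal sketch (not part of the patch) of the pagers documented above:
# list_instances returns a ListInstancesPager, and iterating over it transparently
# fetches additional pages. The parent value is a placeholder and Application
# Default Credentials are assumed.
from google.cloud import notebooks_v1beta1

client = notebooks_v1beta1.NotebookServiceClient()
pager = client.list_instances(
    request=notebooks_v1beta1.ListInstancesRequest(
        parent="projects/my-project/locations/us-central1-a"  # placeholder
    )
)
for instance in pager:  # yields types.Instance across all pages
    print(instance.name, instance.state)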
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py
index bc1ba94..9c6f2a1 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from collections import OrderedDict
from typing import Dict, Type
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py
index 240525b..affbf2d 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,23 +13,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import abc
-import typing
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
import pkg_resources
-from google import auth # type: ignore
-from google.api_core import exceptions # type: ignore
+import google.auth # type: ignore
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
-from google.longrunning import operations_pb2 as operations # type: ignore
-
+from google.longrunning import operations_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -39,27 +38,41 @@
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+try:
+ # google.auth.__version__ was added in 1.26.0
+ _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+ try: # try pkg_resources if it is available
+ _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+ except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GOOGLE_AUTH_VERSION = None
+
+_API_CORE_VERSION = google.api_core.__version__
+
class NotebookServiceTransport(abc.ABC):
"""Abstract transport class for NotebookService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+ DEFAULT_HOST: str = "notebooks.googleapis.com"
+
def __init__(
self,
*,
- host: str = "notebooks.googleapis.com",
- credentials: credentials.Credentials = None,
- credentials_file: typing.Optional[str] = None,
- scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
- quota_project_id: typing.Optional[str] = None,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -68,13 +81,13 @@ def __init__(
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
- scope (Optional[Sequence[str]]): A list of scopes.
+ scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -82,28 +95,75 @@ def __init__(
host += ":443"
self._host = host
+ scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
- raise exceptions.DuplicateCredentialArgs(
+ raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
- credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
- credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
+ # TODO(busunkim): These two class methods are in the base transport
+ # to avoid duplicating code across the transport classes. These functions
+ # should be deleted once the minimum required versions of google-api-core
+ # and google-auth are increased.
+
+ # TODO: Remove this function once google-auth >= 1.25.0 is required
+ @classmethod
+ def _get_scopes_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Optional[Sequence[str]]]:
+ """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+ scopes_kwargs = {}
+
+ if _GOOGLE_AUTH_VERSION and (
+ packaging.version.parse(_GOOGLE_AUTH_VERSION)
+ >= packaging.version.parse("1.25.0")
+ ):
+ scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+ else:
+ scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+ return scopes_kwargs
+
+ # TODO: Remove this function once google-api-core >= 1.26.0 is required
+ @classmethod
+ def _get_self_signed_jwt_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
+ """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
+
+ self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
+
+ if _API_CORE_VERSION and (
+ packaging.version.parse(_API_CORE_VERSION)
+ >= packaging.version.parse("1.26.0")
+ ):
+ self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
+ self_signed_jwt_kwargs["scopes"] = scopes
+ self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
+ else:
+ self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
+
+ return self_signed_jwt_kwargs
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
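# A stand-alone sketch (not part of the patch) of the version gate implemented by
# _get_scopes_kwargs above: google-auth >= 1.25.0 accepts both scopes and
# default_scopes, while older releases only accept scopes.
import packaging.version

CLOUD_PLATFORM = ("https://www.googleapis.com/auth/cloud-platform",)


def scopes_kwargs(scopes, google_auth_version):
    if packaging.version.parse(google_auth_version) >= packaging.version.parse("1.25.0"):
        return {"scopes": scopes, "default_scopes": CLOUD_PLATFORM}
    return {"scopes": scopes or CLOUD_PLATFORM}


print(scopes_kwargs(None, "1.24.0"))  # {'scopes': ('https://.../cloud-platform',)}
print(scopes_kwargs(None, "1.30.0"))  # {'scopes': None, 'default_scopes': (...)}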
@@ -185,122 +245,119 @@ def operations_client(self) -> operations_v1.OperationsClient:
@property
def list_instances(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.ListInstancesRequest],
- typing.Union[
- service.ListInstancesResponse,
- typing.Awaitable[service.ListInstancesResponse],
- ],
+ Union[service.ListInstancesResponse, Awaitable[service.ListInstancesResponse]],
]:
raise NotImplementedError()
@property
def get_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.GetInstanceRequest],
- typing.Union[instance.Instance, typing.Awaitable[instance.Instance]],
+ Union[instance.Instance, Awaitable[instance.Instance]],
]:
raise NotImplementedError()
@property
def create_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.CreateInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def register_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.RegisterInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def set_instance_accelerator(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.SetInstanceAcceleratorRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def set_instance_machine_type(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.SetInstanceMachineTypeRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def set_instance_labels(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.SetInstanceLabelsRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def delete_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.DeleteInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def start_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.StartInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def stop_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.StopInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def reset_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.ResetInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def report_instance_info(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.ReportInstanceInfoRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def is_instance_upgradeable(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.IsInstanceUpgradeableRequest],
- typing.Union[
+ Union[
service.IsInstanceUpgradeableResponse,
- typing.Awaitable[service.IsInstanceUpgradeableResponse],
+ Awaitable[service.IsInstanceUpgradeableResponse],
],
]:
raise NotImplementedError()
@@ -308,29 +365,29 @@ def is_instance_upgradeable(
@property
def upgrade_instance(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.UpgradeInstanceRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def upgrade_instance_internal(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.UpgradeInstanceInternalRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def list_environments(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.ListEnvironmentsRequest],
- typing.Union[
+ Union[
service.ListEnvironmentsResponse,
- typing.Awaitable[service.ListEnvironmentsResponse],
+ Awaitable[service.ListEnvironmentsResponse],
],
]:
raise NotImplementedError()
@@ -338,29 +395,27 @@ def list_environments(
@property
def get_environment(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.GetEnvironmentRequest],
- typing.Union[
- environment.Environment, typing.Awaitable[environment.Environment]
- ],
+ Union[environment.Environment, Awaitable[environment.Environment]],
]:
raise NotImplementedError()
@property
def create_environment(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.CreateEnvironmentRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def delete_environment(
self,
- ) -> typing.Callable[
+ ) -> Callable[
[service.DeleteEnvironmentRequest],
- typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py
index 9b30743..fe6a527 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,15 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.api_core import gapic_v1 # type: ignore
-from google import auth # type: ignore
-from google.auth import credentials # type: ignore
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
@@ -30,8 +28,7 @@
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
-from google.longrunning import operations_pb2 as operations # type: ignore
-
+from google.longrunning import operations_pb2 # type: ignore
from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
@@ -54,20 +51,22 @@ def __init__(
self,
*,
host: str = "notebooks.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -91,6 +90,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -105,72 +108,61 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -178,24 +170,14 @@ def __init__(
],
)
- self._stubs = {} # type: Dict[str, Callable]
- self._operations_client = None
-
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "notebooks.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
@@ -203,7 +185,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -226,13 +208,15 @@ def create_channel(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
- scopes = scopes or cls.AUTH_SCOPES
+
+ self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
+
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes,
quota_project_id=quota_project_id,
+ **self_signed_jwt_kwargs,
**kwargs,
)
@@ -309,7 +293,7 @@ def get_instance(self) -> Callable[[service.GetInstanceRequest], instance.Instan
@property
def create_instance(
self,
- ) -> Callable[[service.CreateInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the create instance method over gRPC.
Creates a new Instance in a given project and
@@ -329,14 +313,14 @@ def create_instance(
self._stubs["create_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/CreateInstance",
request_serializer=service.CreateInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_instance"]
@property
def register_instance(
self,
- ) -> Callable[[service.RegisterInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.RegisterInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the register instance method over gRPC.
Registers an existing legacy notebook instance to the
@@ -360,14 +344,14 @@ def register_instance(
self._stubs["register_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/RegisterInstance",
request_serializer=service.RegisterInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["register_instance"]
@property
def set_instance_accelerator(
self,
- ) -> Callable[[service.SetInstanceAcceleratorRequest], operations.Operation]:
+ ) -> Callable[[service.SetInstanceAcceleratorRequest], operations_pb2.Operation]:
r"""Return a callable for the set instance accelerator method over gRPC.
Updates the guest accelerators of a single Instance.
@@ -386,14 +370,14 @@ def set_instance_accelerator(
self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceAccelerator",
request_serializer=service.SetInstanceAcceleratorRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["set_instance_accelerator"]
@property
def set_instance_machine_type(
self,
- ) -> Callable[[service.SetInstanceMachineTypeRequest], operations.Operation]:
+ ) -> Callable[[service.SetInstanceMachineTypeRequest], operations_pb2.Operation]:
r"""Return a callable for the set instance machine type method over gRPC.
Updates the machine type of a single Instance.
@@ -412,14 +396,14 @@ def set_instance_machine_type(
self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceMachineType",
request_serializer=service.SetInstanceMachineTypeRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["set_instance_machine_type"]
@property
def set_instance_labels(
self,
- ) -> Callable[[service.SetInstanceLabelsRequest], operations.Operation]:
+ ) -> Callable[[service.SetInstanceLabelsRequest], operations_pb2.Operation]:
r"""Return a callable for the set instance labels method over gRPC.
Updates the labels of an Instance.
@@ -438,14 +422,14 @@ def set_instance_labels(
self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceLabels",
request_serializer=service.SetInstanceLabelsRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["set_instance_labels"]
@property
def delete_instance(
self,
- ) -> Callable[[service.DeleteInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the delete instance method over gRPC.
Deletes a single Instance.
@@ -464,14 +448,14 @@ def delete_instance(
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/DeleteInstance",
request_serializer=service.DeleteInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_instance"]
@property
def start_instance(
self,
- ) -> Callable[[service.StartInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.StartInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the start instance method over gRPC.
Starts a notebook instance.
@@ -490,14 +474,14 @@ def start_instance(
self._stubs["start_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/StartInstance",
request_serializer=service.StartInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["start_instance"]
@property
def stop_instance(
self,
- ) -> Callable[[service.StopInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.StopInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the stop instance method over gRPC.
Stops a notebook instance.
@@ -516,14 +500,14 @@ def stop_instance(
self._stubs["stop_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/StopInstance",
request_serializer=service.StopInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["stop_instance"]
@property
def reset_instance(
self,
- ) -> Callable[[service.ResetInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.ResetInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the reset instance method over gRPC.
Resets a notebook instance.
@@ -542,14 +526,14 @@ def reset_instance(
self._stubs["reset_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/ResetInstance",
request_serializer=service.ResetInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["reset_instance"]
@property
def report_instance_info(
self,
- ) -> Callable[[service.ReportInstanceInfoRequest], operations.Operation]:
+ ) -> Callable[[service.ReportInstanceInfoRequest], operations_pb2.Operation]:
r"""Return a callable for the report instance info method over gRPC.
Allows notebook instances to
@@ -572,7 +556,7 @@ def report_instance_info(
self._stubs["report_instance_info"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/ReportInstanceInfo",
request_serializer=service.ReportInstanceInfoRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["report_instance_info"]
@@ -607,7 +591,7 @@ def is_instance_upgradeable(
@property
def upgrade_instance(
self,
- ) -> Callable[[service.UpgradeInstanceRequest], operations.Operation]:
+ ) -> Callable[[service.UpgradeInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the upgrade instance method over gRPC.
Upgrades a notebook instance to the latest version.
@@ -626,14 +610,14 @@ def upgrade_instance(
self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstance",
request_serializer=service.UpgradeInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["upgrade_instance"]
@property
def upgrade_instance_internal(
self,
- ) -> Callable[[service.UpgradeInstanceInternalRequest], operations.Operation]:
+ ) -> Callable[[service.UpgradeInstanceInternalRequest], operations_pb2.Operation]:
r"""Return a callable for the upgrade instance internal method over gRPC.
Allows notebook instances to
@@ -654,7 +638,7 @@ def upgrade_instance_internal(
self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstanceInternal",
request_serializer=service.UpgradeInstanceInternalRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["upgrade_instance_internal"]
@@ -713,7 +697,7 @@ def get_environment(
@property
def create_environment(
self,
- ) -> Callable[[service.CreateEnvironmentRequest], operations.Operation]:
+ ) -> Callable[[service.CreateEnvironmentRequest], operations_pb2.Operation]:
r"""Return a callable for the create environment method over gRPC.
Creates a new Environment.
@@ -732,14 +716,14 @@ def create_environment(
self._stubs["create_environment"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/CreateEnvironment",
request_serializer=service.CreateEnvironmentRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_environment"]
@property
def delete_environment(
self,
- ) -> Callable[[service.DeleteEnvironmentRequest], operations.Operation]:
+ ) -> Callable[[service.DeleteEnvironmentRequest], operations_pb2.Operation]:
r"""Return a callable for the delete environment method over gRPC.
Deletes a single Environment.
@@ -758,7 +742,7 @@ def delete_environment(
self._stubs["delete_environment"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/DeleteEnvironment",
request_serializer=service.DeleteEnvironmentRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_environment"]
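The sync gRPC transport above gains a ``client_cert_source_for_mtls`` callback, consulted only when neither ``channel`` nor ``ssl_channel_credentials`` is supplied, and it now defers host/credential/scope handling to the base class before building the channel. A hedged usage sketch; the mTLS host and certificate paths are placeholders, and running it requires Application Default Credentials:

from google.cloud.notebooks_v1beta1.services.notebook_service.transports.grpc import (
    NotebookServiceGrpcTransport,
)


def _load_client_cert():
    # Return (certificate_chain, private_key) as PEM-encoded bytes.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


# Placeholder mTLS endpoint; credentials fall back to ADC.
transport = NotebookServiceGrpcTransport(
    host="notebooks.mtls.googleapis.com",
    client_cert_source_for_mtls=_load_client_cert,
)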
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
index a68037d..a7b0fc3 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,16 +13,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
-from google import auth # type: ignore
-from google.auth import credentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
@@ -31,8 +29,7 @@
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
-from google.longrunning import operations_pb2 as operations # type: ignore
-
+from google.longrunning import operations_pb2 # type: ignore
from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import NotebookServiceGrpcTransport
@@ -57,7 +54,7 @@ class NotebookServiceGrpcAsyncIOTransport(NotebookServiceTransport):
def create_channel(
cls,
host: str = "notebooks.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
@@ -65,7 +62,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -84,13 +81,15 @@ def create_channel(
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
- scopes = scopes or cls.AUTH_SCOPES
+
+ self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
+
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes,
quota_project_id=quota_project_id,
+ **self_signed_jwt_kwargs,
**kwargs,
)
@@ -98,20 +97,22 @@ def __init__(
self,
*,
host: str = "notebooks.googleapis.com",
- credentials: credentials.Credentials = None,
+ credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
- host (Optional[str]): The hostname to connect to.
+ host (Optional[str]):
+ The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -136,12 +137,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -150,72 +155,60 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -223,18 +216,8 @@ def __init__(
],
)
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
-
- self._stubs = {}
- self._operations_client = None
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
@@ -319,7 +302,7 @@ def get_instance(
@property
def create_instance(
self,
- ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create instance method over gRPC.
Creates a new Instance in a given project and
@@ -339,14 +322,16 @@ def create_instance(
self._stubs["create_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/CreateInstance",
request_serializer=service.CreateInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_instance"]
@property
def register_instance(
self,
- ) -> Callable[[service.RegisterInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[
+ [service.RegisterInstanceRequest], Awaitable[operations_pb2.Operation]
+ ]:
r"""Return a callable for the register instance method over gRPC.
Registers an existing legacy notebook instance to the
@@ -370,7 +355,7 @@ def register_instance(
self._stubs["register_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/RegisterInstance",
request_serializer=service.RegisterInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["register_instance"]
@@ -378,7 +363,7 @@ def register_instance(
def set_instance_accelerator(
self,
) -> Callable[
- [service.SetInstanceAcceleratorRequest], Awaitable[operations.Operation]
+ [service.SetInstanceAcceleratorRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the set instance accelerator method over gRPC.
@@ -398,7 +383,7 @@ def set_instance_accelerator(
self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceAccelerator",
request_serializer=service.SetInstanceAcceleratorRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["set_instance_accelerator"]
@@ -406,7 +391,7 @@ def set_instance_accelerator(
def set_instance_machine_type(
self,
) -> Callable[
- [service.SetInstanceMachineTypeRequest], Awaitable[operations.Operation]
+ [service.SetInstanceMachineTypeRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the set instance machine type method over gRPC.
@@ -426,14 +411,16 @@ def set_instance_machine_type(
self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceMachineType",
request_serializer=service.SetInstanceMachineTypeRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["set_instance_machine_type"]
@property
def set_instance_labels(
self,
- ) -> Callable[[service.SetInstanceLabelsRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[
+ [service.SetInstanceLabelsRequest], Awaitable[operations_pb2.Operation]
+ ]:
r"""Return a callable for the set instance labels method over gRPC.
Updates the labels of an Instance.
@@ -452,14 +439,14 @@ def set_instance_labels(
self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceLabels",
request_serializer=service.SetInstanceLabelsRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["set_instance_labels"]
@property
def delete_instance(
self,
- ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete instance method over gRPC.
Deletes a single Instance.
@@ -478,14 +465,14 @@ def delete_instance(
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/DeleteInstance",
request_serializer=service.DeleteInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_instance"]
@property
def start_instance(
self,
- ) -> Callable[[service.StartInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[[service.StartInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the start instance method over gRPC.
Starts a notebook instance.
@@ -504,14 +491,14 @@ def start_instance(
self._stubs["start_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/StartInstance",
request_serializer=service.StartInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["start_instance"]
@property
def stop_instance(
self,
- ) -> Callable[[service.StopInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[[service.StopInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the stop instance method over gRPC.
Stops a notebook instance.
@@ -530,14 +517,14 @@ def stop_instance(
self._stubs["stop_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/StopInstance",
request_serializer=service.StopInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["stop_instance"]
@property
def reset_instance(
self,
- ) -> Callable[[service.ResetInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[[service.ResetInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the reset instance method over gRPC.
Resets a notebook instance.
@@ -556,14 +543,16 @@ def reset_instance(
self._stubs["reset_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/ResetInstance",
request_serializer=service.ResetInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["reset_instance"]
@property
def report_instance_info(
self,
- ) -> Callable[[service.ReportInstanceInfoRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[
+ [service.ReportInstanceInfoRequest], Awaitable[operations_pb2.Operation]
+ ]:
r"""Return a callable for the report instance info method over gRPC.
Allows notebook instances to
@@ -586,7 +575,7 @@ def report_instance_info(
self._stubs["report_instance_info"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/ReportInstanceInfo",
request_serializer=service.ReportInstanceInfoRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["report_instance_info"]
@@ -622,7 +611,9 @@ def is_instance_upgradeable(
@property
def upgrade_instance(
self,
- ) -> Callable[[service.UpgradeInstanceRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[
+ [service.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation]
+ ]:
r"""Return a callable for the upgrade instance method over gRPC.
Upgrades a notebook instance to the latest version.
@@ -641,7 +632,7 @@ def upgrade_instance(
self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstance",
request_serializer=service.UpgradeInstanceRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["upgrade_instance"]
@@ -649,7 +640,7 @@ def upgrade_instance(
def upgrade_instance_internal(
self,
) -> Callable[
- [service.UpgradeInstanceInternalRequest], Awaitable[operations.Operation]
+ [service.UpgradeInstanceInternalRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the upgrade instance internal method over gRPC.
@@ -671,7 +662,7 @@ def upgrade_instance_internal(
self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstanceInternal",
request_serializer=service.UpgradeInstanceInternalRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["upgrade_instance_internal"]
@@ -732,7 +723,9 @@ def get_environment(
@property
def create_environment(
self,
- ) -> Callable[[service.CreateEnvironmentRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[
+ [service.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation]
+ ]:
r"""Return a callable for the create environment method over gRPC.
Creates a new Environment.
@@ -751,14 +744,16 @@ def create_environment(
self._stubs["create_environment"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/CreateEnvironment",
request_serializer=service.CreateEnvironmentRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_environment"]
@property
def delete_environment(
self,
- ) -> Callable[[service.DeleteEnvironmentRequest], Awaitable[operations.Operation]]:
+ ) -> Callable[
+ [service.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation]
+ ]:
r"""Return a callable for the delete environment method over gRPC.
Deletes a single Environment.
@@ -777,7 +772,7 @@ def delete_environment(
self._stubs["delete_environment"] = self.grpc_channel.unary_unary(
"/google.cloud.notebooks.v1beta1.NotebookService/DeleteEnvironment",
request_serializer=service.DeleteEnvironmentRequest.serialize,
- response_deserializer=operations.Operation.FromString,
+ response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_environment"]
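The asyncio transport gets the same reshuffle: ``create_channel`` no longer defaults ``scopes`` itself but forwards ``cls._get_self_signed_jwt_kwargs(host, scopes)`` to ``grpc_helpers_async.create_channel``, and an explicitly passed channel short-circuits credential handling. A small sketch of wiring a pre-built channel into the transport (the host is just the public default; ADC must be configured):

import asyncio

from google.cloud.notebooks_v1beta1.services.notebook_service.transports.grpc_asyncio import (
    NotebookServiceGrpcAsyncIOTransport,
)


async def main() -> None:
    # create_channel() resolves credentials via ADC when none are given.
    channel = NotebookServiceGrpcAsyncIOTransport.create_channel(
        host="notebooks.googleapis.com"
    )
    # Passing a channel makes the transport ignore credential arguments.
    transport = NotebookServiceGrpcAsyncIOTransport(channel=channel)
    await channel.close()


asyncio.run(main())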
diff --git a/google/cloud/notebooks_v1beta1/types/__init__.py b/google/cloud/notebooks_v1beta1/types/__init__.py
index 853a88d..6982c3d 100644
--- a/google/cloud/notebooks_v1beta1/types/__init__.py
+++ b/google/cloud/notebooks_v1beta1/types/__init__.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,65 +13,64 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
from .environment import (
+ ContainerImage,
Environment,
VmImage,
- ContainerImage,
)
from .instance import Instance
from .service import (
- OperationMetadata,
+ CreateEnvironmentRequest,
+ CreateInstanceRequest,
+ DeleteEnvironmentRequest,
+ DeleteInstanceRequest,
+ GetEnvironmentRequest,
+ GetInstanceRequest,
+ IsInstanceUpgradeableRequest,
+ IsInstanceUpgradeableResponse,
+ ListEnvironmentsRequest,
+ ListEnvironmentsResponse,
ListInstancesRequest,
ListInstancesResponse,
- GetInstanceRequest,
- CreateInstanceRequest,
+ OperationMetadata,
RegisterInstanceRequest,
+ ReportInstanceInfoRequest,
+ ResetInstanceRequest,
SetInstanceAcceleratorRequest,
- SetInstanceMachineTypeRequest,
SetInstanceLabelsRequest,
- DeleteInstanceRequest,
+ SetInstanceMachineTypeRequest,
StartInstanceRequest,
StopInstanceRequest,
- ResetInstanceRequest,
- ReportInstanceInfoRequest,
- IsInstanceUpgradeableRequest,
- IsInstanceUpgradeableResponse,
- UpgradeInstanceRequest,
UpgradeInstanceInternalRequest,
- ListEnvironmentsRequest,
- ListEnvironmentsResponse,
- GetEnvironmentRequest,
- CreateEnvironmentRequest,
- DeleteEnvironmentRequest,
+ UpgradeInstanceRequest,
)
__all__ = (
+ "ContainerImage",
"Environment",
"VmImage",
- "ContainerImage",
"Instance",
- "OperationMetadata",
+ "CreateEnvironmentRequest",
+ "CreateInstanceRequest",
+ "DeleteEnvironmentRequest",
+ "DeleteInstanceRequest",
+ "GetEnvironmentRequest",
+ "GetInstanceRequest",
+ "IsInstanceUpgradeableRequest",
+ "IsInstanceUpgradeableResponse",
+ "ListEnvironmentsRequest",
+ "ListEnvironmentsResponse",
"ListInstancesRequest",
"ListInstancesResponse",
- "GetInstanceRequest",
- "CreateInstanceRequest",
+ "OperationMetadata",
"RegisterInstanceRequest",
+ "ReportInstanceInfoRequest",
+ "ResetInstanceRequest",
"SetInstanceAcceleratorRequest",
- "SetInstanceMachineTypeRequest",
"SetInstanceLabelsRequest",
- "DeleteInstanceRequest",
+ "SetInstanceMachineTypeRequest",
"StartInstanceRequest",
"StopInstanceRequest",
- "ResetInstanceRequest",
- "ReportInstanceInfoRequest",
- "IsInstanceUpgradeableRequest",
- "IsInstanceUpgradeableResponse",
- "UpgradeInstanceRequest",
"UpgradeInstanceInternalRequest",
- "ListEnvironmentsRequest",
- "ListEnvironmentsResponse",
- "GetEnvironmentRequest",
- "CreateEnvironmentRequest",
- "DeleteEnvironmentRequest",
+ "UpgradeInstanceRequest",
)
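The ``types`` package re-exports are only alphabetized; nothing changes for callers, who keep importing the same names. For instance, a request object can still be built as below (resource names are placeholders):

from google.cloud.notebooks_v1beta1.types import CreateInstanceRequest, Instance

request = CreateInstanceRequest(
    parent="projects/my-project/locations/us-central1",  # placeholder
    instance_id="my-instance",                           # placeholder
    instance=Instance(machine_type="n1-standard-4"),
)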
diff --git a/google/cloud/notebooks_v1beta1/types/environment.py b/google/cloud/notebooks_v1beta1/types/environment.py
index 5f13677..5d2aeac 100644
--- a/google/cloud/notebooks_v1beta1/types/environment.py
+++ b/google/cloud/notebooks_v1beta1/types/environment.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,11 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import proto # type: ignore
-
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -39,10 +36,10 @@ class Environment(proto.Message):
Display name of this environment for the UI.
description (str):
A brief description of this environment.
- vm_image (~.environment.VmImage):
+ vm_image (google.cloud.notebooks_v1beta1.types.VmImage):
Use a Compute Engine VM image to start the
notebook instance.
- container_image (~.environment.ContainerImage):
+ container_image (google.cloud.notebooks_v1beta1.types.ContainerImage):
Use a container image to start the notebook
instance.
post_startup_script (str):
@@ -50,28 +47,22 @@ class Environment(proto.Message):
notebook instance fully boots up. The path must be a URL or
Cloud Storage path. Example:
``"gs://path-to-file/file-name"``
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time at which this
environment was created.
"""
- name = proto.Field(proto.STRING, number=1)
-
- display_name = proto.Field(proto.STRING, number=2)
-
- description = proto.Field(proto.STRING, number=3)
-
+ name = proto.Field(proto.STRING, number=1,)
+ display_name = proto.Field(proto.STRING, number=2,)
+ description = proto.Field(proto.STRING, number=3,)
vm_image = proto.Field(
proto.MESSAGE, number=6, oneof="image_type", message="VmImage",
)
-
container_image = proto.Field(
proto.MESSAGE, number=7, oneof="image_type", message="ContainerImage",
)
-
- post_startup_script = proto.Field(proto.STRING, number=8)
-
- create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
+ post_startup_script = proto.Field(proto.STRING, number=8,)
+ create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,)
class VmImage(proto.Message):
@@ -90,11 +81,9 @@ class VmImage(proto.Message):
the newest image in this family will be used.
"""
- project = proto.Field(proto.STRING, number=1)
-
- image_name = proto.Field(proto.STRING, number=2, oneof="image")
-
- image_family = proto.Field(proto.STRING, number=3, oneof="image")
+ project = proto.Field(proto.STRING, number=1,)
+ image_name = proto.Field(proto.STRING, number=2, oneof="image",)
+ image_family = proto.Field(proto.STRING, number=3, oneof="image",)
class ContainerImage(proto.Message):
@@ -110,9 +99,8 @@ class ContainerImage(proto.Message):
specified, this defaults to the latest tag.
"""
- repository = proto.Field(proto.STRING, number=1)
-
- tag = proto.Field(proto.STRING, number=2)
+ repository = proto.Field(proto.STRING, number=1,)
+ tag = proto.Field(proto.STRING, number=2,)
__all__ = tuple(sorted(__protobuf__.manifest))
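Beyond the docstring cross-reference cleanup (``~.environment.VmImage`` becoming the fully qualified ``google.cloud.notebooks_v1beta1.types.VmImage``), the ``Environment`` field declarations are merely reformatted with trailing commas, so construction is unchanged. A sketch with placeholder values; exactly one member of the ``image_type`` oneof may be set:

from google.cloud.notebooks_v1beta1.types import ContainerImage, Environment

env = Environment(
    display_name="example-environment",                    # placeholder
    container_image=ContainerImage(
        repository="gcr.io/my-project/my-notebook-image",  # placeholder
        tag="latest",
    ),
    post_startup_script="gs://my-bucket/startup.sh",        # placeholder
)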
diff --git a/google/cloud/notebooks_v1beta1/types/instance.py b/google/cloud/notebooks_v1beta1/types/instance.py
index 9a775f3..3f862be 100644
--- a/google/cloud/notebooks_v1beta1/types/instance.py
+++ b/google/cloud/notebooks_v1beta1/types/instance.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,12 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import proto # type: ignore
-
from google.cloud.notebooks_v1beta1.types import environment
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -29,15 +26,14 @@
class Instance(proto.Message):
r"""The definition of a notebook instance.
-
Attributes:
name (str):
Output only. The name of this notebook instance. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
- vm_image (~.environment.VmImage):
+ vm_image (google.cloud.notebooks_v1beta1.types.VmImage):
Use a Compute Engine VM image to start the
notebook instance.
- container_image (~.environment.ContainerImage):
+ container_image (google.cloud.notebooks_v1beta1.types.ContainerImage):
Use a container image to start the notebook
instance.
post_startup_script (str):
@@ -67,12 +63,12 @@ class Instance(proto.Message):
Required. The `Compute Engine machine
type `__
of this instance.
- accelerator_config (~.instance.Instance.AcceleratorConfig):
+ accelerator_config (google.cloud.notebooks_v1beta1.types.Instance.AcceleratorConfig):
The hardware accelerator used on this instance. If you use
accelerators, make sure that your configuration has `enough
vCPUs and memory to support the ``machine_type`` you have
selected `__.
- state (~.instance.Instance.State):
+ state (google.cloud.notebooks_v1beta1.types.Instance.State):
Output only. The state of this instance.
install_gpu_driver (bool):
Whether the end user authorizes Google Cloud
@@ -84,7 +80,7 @@ class Instance(proto.Message):
Specify a custom Cloud Storage path where the
GPU driver is stored. If not specified, we'll
automatically choose from official GPU drivers.
- boot_disk_type (~.instance.Instance.DiskType):
+ boot_disk_type (google.cloud.notebooks_v1beta1.types.Instance.DiskType):
Input only. The type of the boot disk attached to this
instance, defaults to standard persistent disk
(``PD_STANDARD``).
@@ -94,7 +90,7 @@ class Instance(proto.Message):
64000 GB (64 TB). The minimum
recommended value is 100 GB. If not
specified, this defaults to 100.
- data_disk_type (~.instance.Instance.DiskType):
+ data_disk_type (google.cloud.notebooks_v1beta1.types.Instance.DiskType):
Input only. The type of the data disk attached to this
instance, defaults to standard persistent disk
(``PD_STANDARD``).
@@ -108,7 +104,7 @@ class Instance(proto.Message):
no_remove_data_disk (bool):
Input only. If true, the data disk will not
be auto deleted when deleting the instance.
- disk_encryption (~.instance.Instance.DiskEncryption):
+ disk_encryption (google.cloud.notebooks_v1beta1.types.Instance.DiskEncryption):
Input only. Disk encryption method used on
the boot and data disks, defaults to GMEK.
kms_key (str):
@@ -130,15 +126,15 @@ class Instance(proto.Message):
subnet (str):
The name of the subnet that this instance is in. Format:
``projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}``
- labels (Sequence[~.instance.Instance.LabelsEntry]):
+ labels (Sequence[google.cloud.notebooks_v1beta1.types.Instance.LabelsEntry]):
Labels to apply to this instance.
These can be later modified by the setLabels
method.
- metadata (Sequence[~.instance.Instance.MetadataEntry]):
+ metadata (Sequence[google.cloud.notebooks_v1beta1.types.Instance.MetadataEntry]):
Custom metadata to apply to this instance.
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Instance creation time.
- update_time (~.timestamp.Timestamp):
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Instance update time.
"""
@@ -191,78 +187,55 @@ class AcceleratorConfig(proto.Message):
combination. TPUs are not supported.
Attributes:
- type_ (~.instance.Instance.AcceleratorType):
+ type_ (google.cloud.notebooks_v1beta1.types.Instance.AcceleratorType):
Type of this accelerator.
core_count (int):
Count of cores of this accelerator.
"""
type_ = proto.Field(proto.ENUM, number=1, enum="Instance.AcceleratorType",)
+ core_count = proto.Field(proto.INT64, number=2,)
- core_count = proto.Field(proto.INT64, number=2)
-
- name = proto.Field(proto.STRING, number=1)
-
+ name = proto.Field(proto.STRING, number=1,)
vm_image = proto.Field(
proto.MESSAGE, number=2, oneof="environment", message=environment.VmImage,
)
-
container_image = proto.Field(
proto.MESSAGE,
number=3,
oneof="environment",
message=environment.ContainerImage,
)
-
- post_startup_script = proto.Field(proto.STRING, number=4)
-
- proxy_uri = proto.Field(proto.STRING, number=5)
-
- instance_owners = proto.RepeatedField(proto.STRING, number=6)
-
- service_account = proto.Field(proto.STRING, number=7)
-
- machine_type = proto.Field(proto.STRING, number=8)
-
+ post_startup_script = proto.Field(proto.STRING, number=4,)
+ proxy_uri = proto.Field(proto.STRING, number=5,)
+ instance_owners = proto.RepeatedField(proto.STRING, number=6,)
+ service_account = proto.Field(proto.STRING, number=7,)
+ machine_type = proto.Field(proto.STRING, number=8,)
accelerator_config = proto.Field(
proto.MESSAGE, number=9, message=AcceleratorConfig,
)
-
state = proto.Field(proto.ENUM, number=10, enum=State,)
-
- install_gpu_driver = proto.Field(proto.BOOL, number=11)
-
- custom_gpu_driver_path = proto.Field(proto.STRING, number=12)
-
+ install_gpu_driver = proto.Field(proto.BOOL, number=11,)
+ custom_gpu_driver_path = proto.Field(proto.STRING, number=12,)
boot_disk_type = proto.Field(proto.ENUM, number=13, enum=DiskType,)
-
- boot_disk_size_gb = proto.Field(proto.INT64, number=14)
-
+ boot_disk_size_gb = proto.Field(proto.INT64, number=14,)
data_disk_type = proto.Field(proto.ENUM, number=25, enum=DiskType,)
-
- data_disk_size_gb = proto.Field(proto.INT64, number=26)
-
- no_remove_data_disk = proto.Field(proto.BOOL, number=27)
-
+ data_disk_size_gb = proto.Field(proto.INT64, number=26,)
+ no_remove_data_disk = proto.Field(proto.BOOL, number=27,)
disk_encryption = proto.Field(proto.ENUM, number=15, enum=DiskEncryption,)
-
- kms_key = proto.Field(proto.STRING, number=16)
-
- no_public_ip = proto.Field(proto.BOOL, number=17)
-
- no_proxy_access = proto.Field(proto.BOOL, number=18)
-
- network = proto.Field(proto.STRING, number=19)
-
- subnet = proto.Field(proto.STRING, number=20)
-
- labels = proto.MapField(proto.STRING, proto.STRING, number=21)
-
- metadata = proto.MapField(proto.STRING, proto.STRING, number=22)
-
- create_time = proto.Field(proto.MESSAGE, number=23, message=timestamp.Timestamp,)
-
- update_time = proto.Field(proto.MESSAGE, number=24, message=timestamp.Timestamp,)
+ kms_key = proto.Field(proto.STRING, number=16,)
+ no_public_ip = proto.Field(proto.BOOL, number=17,)
+ no_proxy_access = proto.Field(proto.BOOL, number=18,)
+ network = proto.Field(proto.STRING, number=19,)
+ subnet = proto.Field(proto.STRING, number=20,)
+ labels = proto.MapField(proto.STRING, proto.STRING, number=21,)
+ metadata = proto.MapField(proto.STRING, proto.STRING, number=22,)
+ create_time = proto.Field(
+ proto.MESSAGE, number=23, message=timestamp_pb2.Timestamp,
+ )
+ update_time = proto.Field(
+ proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp,
+ )
__all__ = tuple(sorted(__protobuf__.manifest))
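As with ``Environment``, the ``Instance`` message changes only cosmetically: blank lines between ``proto.Field`` declarations are dropped and trailing commas added, while field numbers and types stay put. A construction sketch with placeholder values (map fields such as ``labels`` accept plain dicts):

from google.cloud.notebooks_v1beta1.types import Instance, VmImage

inst = Instance(
    vm_image=VmImage(
        project="deeplearning-platform-release",  # placeholder image project
        image_family="tf2-latest-cpu",            # placeholder image family
    ),
    machine_type="n1-standard-8",
    boot_disk_size_gb=100,
    labels={"team": "research"},
)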
diff --git a/google/cloud/notebooks_v1beta1/types/service.py b/google/cloud/notebooks_v1beta1/types/service.py
index 75b9cc7..e25802a 100644
--- a/google/cloud/notebooks_v1beta1/types/service.py
+++ b/google/cloud/notebooks_v1beta1/types/service.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,13 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import proto # type: ignore
-
from google.cloud.notebooks_v1beta1.types import environment as gcn_environment
from google.cloud.notebooks_v1beta1.types import instance as gcn_instance
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -55,11 +52,10 @@
class OperationMetadata(proto.Message):
r"""Represents the metadata of the long-running operation.
-
Attributes:
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
The time the operation was created.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time the operation finished running.
target (str):
Server-defined resource path for the target
@@ -81,26 +77,18 @@ class OperationMetadata(proto.Message):
API endpoint name of this operation.
"""
- create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
-
- end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
-
- target = proto.Field(proto.STRING, number=3)
-
- verb = proto.Field(proto.STRING, number=4)
-
- status_message = proto.Field(proto.STRING, number=5)
-
- requested_cancellation = proto.Field(proto.BOOL, number=6)
-
- api_version = proto.Field(proto.STRING, number=7)
-
- endpoint = proto.Field(proto.STRING, number=8)
+ create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
+ target = proto.Field(proto.STRING, number=3,)
+ verb = proto.Field(proto.STRING, number=4,)
+ status_message = proto.Field(proto.STRING, number=5,)
+ requested_cancellation = proto.Field(proto.BOOL, number=6,)
+ api_version = proto.Field(proto.STRING, number=7,)
+ endpoint = proto.Field(proto.STRING, number=8,)
class ListInstancesRequest(proto.Message):
r"""Request for listing notebook instances.
-
Attributes:
parent (str):
Required. Format:
@@ -112,18 +100,15 @@ class ListInstancesRequest(proto.Message):
used to continue listing from the last result.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- page_size = proto.Field(proto.INT32, number=2)
-
- page_token = proto.Field(proto.STRING, number=3)
+ parent = proto.Field(proto.STRING, number=1,)
+ page_size = proto.Field(proto.INT32, number=2,)
+ page_token = proto.Field(proto.STRING, number=3,)
class ListInstancesResponse(proto.Message):
r"""Response for listing notebook instances.
-
Attributes:
- instances (Sequence[~.gcn_instance.Instance]):
+ instances (Sequence[google.cloud.notebooks_v1beta1.types.Instance]):
A list of returned instances.
next_page_token (str):
Page token that can be used to continue
@@ -142,27 +127,23 @@ def raw_page(self):
instances = proto.RepeatedField(
proto.MESSAGE, number=1, message=gcn_instance.Instance,
)
-
- next_page_token = proto.Field(proto.STRING, number=2)
-
- unreachable = proto.RepeatedField(proto.STRING, number=3)
+ next_page_token = proto.Field(proto.STRING, number=2,)
+ unreachable = proto.RepeatedField(proto.STRING, number=3,)
class GetInstanceRequest(proto.Message):
r"""Request for getting a notebook instance.
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class CreateInstanceRequest(proto.Message):
r"""Request for creating a notebook instance.
-
Attributes:
parent (str):
Required. Format:
@@ -170,20 +151,17 @@ class CreateInstanceRequest(proto.Message):
instance_id (str):
Required. User-defined unique ID of this
instance.
- instance (~.gcn_instance.Instance):
+ instance (google.cloud.notebooks_v1beta1.types.Instance):
Required. The instance to be created.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- instance_id = proto.Field(proto.STRING, number=2)
-
+ parent = proto.Field(proto.STRING, number=1,)
+ instance_id = proto.Field(proto.STRING, number=2,)
instance = proto.Field(proto.MESSAGE, number=3, message=gcn_instance.Instance,)
class RegisterInstanceRequest(proto.Message):
r"""Request for registering a notebook instance.
-
Attributes:
parent (str):
Required. Format:
@@ -196,19 +174,17 @@ class RegisterInstanceRequest(proto.Message):
character cannot be a dash.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- instance_id = proto.Field(proto.STRING, number=2)
+ parent = proto.Field(proto.STRING, number=1,)
+ instance_id = proto.Field(proto.STRING, number=2,)
class SetInstanceAcceleratorRequest(proto.Message):
r"""Request for setting instance accelerator.
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
- type_ (~.gcn_instance.Instance.AcceleratorType):
+ type_ (google.cloud.notebooks_v1beta1.types.Instance.AcceleratorType):
Required. Type of this accelerator.
core_count (int):
Required. Count of cores of this accelerator. Note that not
@@ -218,18 +194,15 @@ class SetInstanceAcceleratorRequest(proto.Message):
to find a valid combination. TPUs are not supported.
"""
- name = proto.Field(proto.STRING, number=1)
-
+ name = proto.Field(proto.STRING, number=1,)
type_ = proto.Field(
proto.ENUM, number=2, enum=gcn_instance.Instance.AcceleratorType,
)
-
- core_count = proto.Field(proto.INT64, number=3)
+ core_count = proto.Field(proto.INT64, number=3,)
class SetInstanceMachineTypeRequest(proto.Message):
r"""Request for setting instance machine type.
-
Attributes:
name (str):
Required. Format:
@@ -239,75 +212,68 @@ class SetInstanceMachineTypeRequest(proto.Message):
type `__.
"""
- name = proto.Field(proto.STRING, number=1)
-
- machine_type = proto.Field(proto.STRING, number=2)
+ name = proto.Field(proto.STRING, number=1,)
+ machine_type = proto.Field(proto.STRING, number=2,)
class SetInstanceLabelsRequest(proto.Message):
r"""Request for setting instance labels.
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
- labels (Sequence[~.service.SetInstanceLabelsRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.notebooks_v1beta1.types.SetInstanceLabelsRequest.LabelsEntry]):
Labels to apply to this instance.
These can be later modified by the setLabels
method
"""
- name = proto.Field(proto.STRING, number=1)
-
- labels = proto.MapField(proto.STRING, proto.STRING, number=2)
+ name = proto.Field(proto.STRING, number=1,)
+ labels = proto.MapField(proto.STRING, proto.STRING, number=2,)
class DeleteInstanceRequest(proto.Message):
r"""Request for deleting a notebook instance.
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class StartInstanceRequest(proto.Message):
r"""Request for starting a notebook instance
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class StopInstanceRequest(proto.Message):
r"""Request for stopping a notebook instance
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class ResetInstanceRequest(proto.Message):
r"""Request for reseting a notebook instance
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class ReportInstanceInfoRequest(proto.Message):
@@ -323,33 +289,29 @@ class ReportInstanceInfoRequest(proto.Message):
authenticating the VM.
            https://cloud.google.com/compute/docs/instances/verifying-
instance-identity
- metadata (Sequence[~.service.ReportInstanceInfoRequest.MetadataEntry]):
+ metadata (Sequence[google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest.MetadataEntry]):
The metadata reported to Notebooks API. This
will be merged to the instance metadata store
"""
- name = proto.Field(proto.STRING, number=1)
-
- vm_id = proto.Field(proto.STRING, number=2)
-
- metadata = proto.MapField(proto.STRING, proto.STRING, number=3)
+ name = proto.Field(proto.STRING, number=1,)
+ vm_id = proto.Field(proto.STRING, number=2,)
+ metadata = proto.MapField(proto.STRING, proto.STRING, number=3,)
class IsInstanceUpgradeableRequest(proto.Message):
r"""Request for checking if a notebook instance is upgradeable.
-
Attributes:
notebook_instance (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- notebook_instance = proto.Field(proto.STRING, number=1)
+ notebook_instance = proto.Field(proto.STRING, number=1,)
class IsInstanceUpgradeableResponse(proto.Message):
r"""Response for checking if a notebook instance is upgradeable.
-
Attributes:
upgradeable (bool):
If an instance is upgradeable.
@@ -361,28 +323,24 @@ class IsInstanceUpgradeableResponse(proto.Message):
Additional information about upgrade.
"""
- upgradeable = proto.Field(proto.BOOL, number=1)
-
- upgrade_version = proto.Field(proto.STRING, number=2)
-
- upgrade_info = proto.Field(proto.STRING, number=3)
+ upgradeable = proto.Field(proto.BOOL, number=1,)
+ upgrade_version = proto.Field(proto.STRING, number=2,)
+ upgrade_info = proto.Field(proto.STRING, number=3,)
class UpgradeInstanceRequest(proto.Message):
r"""Request for upgrading a notebook instance
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class UpgradeInstanceInternalRequest(proto.Message):
r"""Request for upgrading a notebook instance from within the VM
-
Attributes:
name (str):
Required. Format:
@@ -394,14 +352,12 @@ class UpgradeInstanceInternalRequest(proto.Message):
instance-identity
"""
- name = proto.Field(proto.STRING, number=1)
-
- vm_id = proto.Field(proto.STRING, number=2)
+ name = proto.Field(proto.STRING, number=1,)
+ vm_id = proto.Field(proto.STRING, number=2,)
class ListEnvironmentsRequest(proto.Message):
r"""Request for listing environments.
-
Attributes:
parent (str):
Required. Format:
@@ -413,18 +369,15 @@ class ListEnvironmentsRequest(proto.Message):
used to continue listing from the last result.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- page_size = proto.Field(proto.INT32, number=2)
-
- page_token = proto.Field(proto.STRING, number=3)
+ parent = proto.Field(proto.STRING, number=1,)
+ page_size = proto.Field(proto.INT32, number=2,)
+ page_token = proto.Field(proto.STRING, number=3,)
class ListEnvironmentsResponse(proto.Message):
r"""Response for listing environments.
-
Attributes:
- environments (Sequence[~.gcn_environment.Environment]):
+ environments (Sequence[google.cloud.notebooks_v1beta1.types.Environment]):
A list of returned environments.
next_page_token (str):
A page token that can be used to continue
@@ -441,27 +394,23 @@ def raw_page(self):
environments = proto.RepeatedField(
proto.MESSAGE, number=1, message=gcn_environment.Environment,
)
-
- next_page_token = proto.Field(proto.STRING, number=2)
-
- unreachable = proto.RepeatedField(proto.STRING, number=3)
+ next_page_token = proto.Field(proto.STRING, number=2,)
+ unreachable = proto.RepeatedField(proto.STRING, number=3,)
class GetEnvironmentRequest(proto.Message):
r"""Request for getting a notebook environment.
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/environments/{environment_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
class CreateEnvironmentRequest(proto.Message):
r"""Request for creating a notebook environment.
-
Attributes:
parent (str):
Required. Format:
@@ -472,14 +421,12 @@ class CreateEnvironmentRequest(proto.Message):
contain only lowercase letters, numeric characters, and
dashes. The first character must be a lowercase letter and
the last character cannot be a dash.
- environment (~.gcn_environment.Environment):
+ environment (google.cloud.notebooks_v1beta1.types.Environment):
Required. The environment to be created.
"""
- parent = proto.Field(proto.STRING, number=1)
-
- environment_id = proto.Field(proto.STRING, number=2)
-
+ parent = proto.Field(proto.STRING, number=1,)
+ environment_id = proto.Field(proto.STRING, number=2,)
environment = proto.Field(
proto.MESSAGE, number=3, message=gcn_environment.Environment,
)
@@ -487,14 +434,13 @@ class CreateEnvironmentRequest(proto.Message):
class DeleteEnvironmentRequest(proto.Message):
r"""Request for deleting a notebook environment.
-
Attributes:
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/environments/{environment_id}``
"""
- name = proto.Field(proto.STRING, number=1)
+ name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
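
The trailing-comma, single-line field declarations above are a cosmetic generator/black change; the request and response classes remain ordinary proto-plus messages. A minimal usage sketch, assuming the published google-cloud-notebooks package is installed; the project, location, and instance values are placeholders:

# Sketch only: constructing and reading the request types defined above.
from google.cloud.notebooks_v1beta1.types import service

request = service.CreateInstanceRequest(
    parent="projects/my-project/locations/us-central1",  # placeholder
    instance_id="my-instance",                           # placeholder
)
assert request.parent.endswith("us-central1")
request.instance_id = "another-instance"  # fields behave as normal attributes
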
diff --git a/noxfile.py b/noxfile.py
index a57e24b..03aa2f5 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -18,6 +18,7 @@
from __future__ import absolute_import
import os
+import pathlib
import shutil
import nox
@@ -30,6 +31,22 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -45,16 +62,9 @@ def lint(session):
session.run("flake8", "google", "tests")
-@nox.session(python="3.6")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
- """Run black.
-
- Format code to uniform standard.
-
- This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
- That run uses an image that doesn't have 3.6 installed. Before updating this
- check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
- """
+ """Run black. Format code to uniform standard."""
session.install(BLACK_VERSION)
session.run(
"black", *BLACK_PATHS,
@@ -70,17 +80,21 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
- session.install("asyncmock", "pytest-asyncio")
- session.install(
- "mock", "pytest", "pytest-cov",
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
- session.install("-e", ".")
+ session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
+
+ session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
@@ -101,15 +115,18 @@ def unit(session):
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
# Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
- # Sanity check: Only run tests if the environment variable is set.
- if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
- session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -122,16 +139,26 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install(
- "mock", "pytest", "google-cloud-testutils",
- )
- session.install("-e", ".")
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -142,7 +169,7 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=99")
+ session.run("coverage", "report", "--show-missing", "--fail-under=98")
session.run("coverage", "erase")
@@ -152,7 +179,7 @@ def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark")
+ session.install("sphinx==4.0.1", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -174,9 +201,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
-    # https://github.com/docascode/sphinx-docfx-yaml/issues/97
- session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ session.install(
+ "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+ )
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
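
Taken together, the noxfile changes pin every install to a per-interpreter constraints file under testing/ and write per-session sponge logs (matching the new *sponge_log.xml ignore rule). A sketch of the same pattern in an additional, hypothetical session; the session name and paths are illustrative only, not part of this change:

# noxfile-style sketch, not part of this change.
import os
import pathlib

import nox

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()


@nox.session(python=["3.6", "3.7", "3.8", "3.9"])
def unit_extra(session):  # hypothetical session name
    # Pin installs to the per-interpreter constraints file, mirroring the
    # pattern introduced for the `default` and `system` sessions above.
    constraints_path = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
    )
    session.install("pytest", "-c", constraints_path)
    session.install("-e", ".", "-c", constraints_path)
    # The junitxml name matches the `*sponge_log.xml` .gitignore entry.
    session.run(
        "py.test",
        "--quiet",
        f"--junitxml=unit_extra_{session.python}_sponge_log.xml",
        os.path.join("tests", "unit"),
        *session.posargs,
    )
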
diff --git a/synth.py b/owlbot.py
similarity index 68%
rename from synth.py
rename to owlbot.py
index a77bad9..208b5ee 100644
--- a/synth.py
+++ b/owlbot.py
@@ -19,25 +19,19 @@
import synthtool.gcp as gcp
from synthtool.languages import python
-gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
-# ----------------------------------------------------------------------------
-# Generate notebooks GAPIC layer
-# ----------------------------------------------------------------------------
-library = gapic.py_library(
- service="notebooks",
- version="v1beta1",
-    bazel_target="//google/cloud/notebooks/v1beta1:notebooks-v1beta1-py",
+default_version = "v1beta1"
-)
+for library in s.get_staging_dirs(default_version):
+ s.move(library, excludes=["scripts/fixup*.py", "setup.py", "README.rst", "docs/index.rst"])
-s.move(library, excludes=["scripts/fixup*.py", "setup.py", "README.rst", "docs/index.rst"])
+s.remove_staging_dirs()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=99, microgenerator=True)
+templated_files = common.py_library(cov_level=98, microgenerator=True)
s.move(
templated_files, excludes=[".coveragerc"]
) # the microgenerator has a good coveragerc file
diff --git a/renovate.json b/renovate.json
index 4fa9493..c048955 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,9 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"],
+ "pip_requirements": {
+ "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+ }
}
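
Renovate applies fileMatch entries as regular expressions against repo-relative paths (an assumption about Renovate's matching semantics, not something this diff states). A quick sketch of which files the new patterns would pick up:

# Sketch: check sample paths against the fileMatch regexes added above.
import re

patterns = [
    r"requirements-test.txt",
    r"samples/[\S/]*constraints.txt",
    r"samples/[\S/]*constraints-test.txt",
]
paths = [
    "samples/snippets/constraints.txt",       # matched
    "samples/snippets/constraints-test.txt",  # matched
    "testing/constraints-3.6.txt",            # not matched (outside samples/)
]
for path in paths:
    print(path, any(re.search(pattern, path) for pattern in patterns))
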
diff --git a/setup.py b/setup.py
index 1f140be..6a2c301 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
import os
import setuptools # type: ignore
-version = "0.1.2"
+version = "0.2.0"
package_root = os.path.abspath(os.path.dirname(__file__))
@@ -40,8 +40,9 @@
platforms="Posix; MacOS X; Windows",
include_package_data=True,
install_requires=(
- "google-api-core[grpc] >= 1.22.0, < 2.0.0dev",
+ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
"proto-plus >= 1.1.0",
+ "packaging >= 14.3",
),
python_requires=">=3.6",
classifiers=[
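
The new packaging requirement backs the version gating introduced elsewhere in this change (see the _GOOGLE_AUTH_VERSION / _API_CORE_VERSION comparisons in the test module below). A minimal sketch of that kind of gate; the fallback version string is a placeholder:

# Sketch of the version comparison style that "packaging >= 14.3" enables.
import google.auth
import packaging.version

_GOOGLE_AUTH_VERSION = getattr(google.auth, "__version__", "0.0.0")  # hedged lookup

if packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"):
    # Older google-auth: take the pre-1.25.0 code path that the
    # requires_google_auth_lt_1_25_0 tests exercise.
    pass
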
diff --git a/synth.metadata b/synth.metadata
deleted file mode 100644
index 9109ecc..0000000
--- a/synth.metadata
+++ /dev/null
@@ -1,132 +0,0 @@
-{
- "sources": [
- {
- "git": {
- "name": ".",
- "remote": "https://github.com/googleapis/python-notebooks.git",
- "sha": "7633ad32ba52781688961fa78aecb6fa0b5f4e3e"
- }
- },
- {
- "git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907",
- "internalRef": "347055288"
- }
- },
- {
- "git": {
- "name": "synthtool",
- "remote": "https://github.com/googleapis/synthtool.git",
- "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8"
- }
- },
- {
- "git": {
- "name": "synthtool",
- "remote": "https://github.com/googleapis/synthtool.git",
- "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8"
- }
- }
- ],
- "destinations": [
- {
- "client": {
- "source": "googleapis",
- "apiName": "notebooks",
- "apiVersion": "v1beta1",
- "language": "python",
- "generator": "bazel"
- }
- }
- ],
- "generatedFiles": [
- ".flake8",
- ".github/CONTRIBUTING.md",
- ".github/ISSUE_TEMPLATE/bug_report.md",
- ".github/ISSUE_TEMPLATE/feature_request.md",
- ".github/ISSUE_TEMPLATE/support_request.md",
- ".github/PULL_REQUEST_TEMPLATE.md",
- ".github/release-please.yml",
- ".github/snippet-bot.yml",
- ".gitignore",
- ".kokoro/build.sh",
- ".kokoro/continuous/common.cfg",
- ".kokoro/continuous/continuous.cfg",
- ".kokoro/docker/docs/Dockerfile",
- ".kokoro/docker/docs/fetch_gpg_keys.sh",
- ".kokoro/docs/common.cfg",
- ".kokoro/docs/docs-presubmit.cfg",
- ".kokoro/docs/docs.cfg",
- ".kokoro/populate-secrets.sh",
- ".kokoro/presubmit/common.cfg",
- ".kokoro/presubmit/presubmit.cfg",
- ".kokoro/publish-docs.sh",
- ".kokoro/release.sh",
- ".kokoro/release/common.cfg",
- ".kokoro/release/release.cfg",
- ".kokoro/samples/lint/common.cfg",
- ".kokoro/samples/lint/continuous.cfg",
- ".kokoro/samples/lint/periodic.cfg",
- ".kokoro/samples/lint/presubmit.cfg",
- ".kokoro/samples/python3.6/common.cfg",
- ".kokoro/samples/python3.6/continuous.cfg",
- ".kokoro/samples/python3.6/periodic.cfg",
- ".kokoro/samples/python3.6/presubmit.cfg",
- ".kokoro/samples/python3.7/common.cfg",
- ".kokoro/samples/python3.7/continuous.cfg",
- ".kokoro/samples/python3.7/periodic.cfg",
- ".kokoro/samples/python3.7/presubmit.cfg",
- ".kokoro/samples/python3.8/common.cfg",
- ".kokoro/samples/python3.8/continuous.cfg",
- ".kokoro/samples/python3.8/periodic.cfg",
- ".kokoro/samples/python3.8/presubmit.cfg",
- ".kokoro/test-samples.sh",
- ".kokoro/trampoline.sh",
- ".kokoro/trampoline_v2.sh",
- ".pre-commit-config.yaml",
- ".trampolinerc",
- "CODE_OF_CONDUCT.md",
- "CONTRIBUTING.rst",
- "LICENSE",
- "MANIFEST.in",
- "docs/_static/custom.css",
- "docs/_templates/layout.html",
- "docs/conf.py",
- "docs/multiprocessing.rst",
- "docs/notebooks_v1beta1/services.rst",
- "docs/notebooks_v1beta1/types.rst",
- "google/cloud/notebooks/__init__.py",
- "google/cloud/notebooks/py.typed",
- "google/cloud/notebooks_v1beta1/__init__.py",
- "google/cloud/notebooks_v1beta1/py.typed",
- "google/cloud/notebooks_v1beta1/services/__init__.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/client.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py",
- "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py",
- "google/cloud/notebooks_v1beta1/types/__init__.py",
- "google/cloud/notebooks_v1beta1/types/environment.py",
- "google/cloud/notebooks_v1beta1/types/instance.py",
- "google/cloud/notebooks_v1beta1/types/service.py",
- "mypy.ini",
- "noxfile.py",
- "renovate.json",
- "scripts/decrypt-secrets.sh",
- "scripts/readme-gen/readme_gen.py",
- "scripts/readme-gen/templates/README.tmpl.rst",
- "scripts/readme-gen/templates/auth.tmpl.rst",
- "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
- "scripts/readme-gen/templates/install_deps.tmpl.rst",
- "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
- "setup.cfg",
- "testing/.gitignore",
- "tests/unit/gapic/notebooks_v1beta1/__init__.py",
- "tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py"
- ]
-}
\ No newline at end of file
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
new file mode 100644
index 0000000..e69de29
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
new file mode 100644
index 0000000..e69de29
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
new file mode 100644
index 0000000..4fc6a86
--- /dev/null
+++ b/testing/constraints-3.6.txt
@@ -0,0 +1,10 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have foo==1.14.0
+google-api-core==1.22.2
+proto-plus==1.1.0
+packaging==14.3
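
The header comment describes the rule: each pin must equal the ">=" lower bound declared in setup.py. A rough sketch (not part of the repo) of checking that mechanically, with install_requires copied from the setup.py hunk above:

# Sketch: verify that constraints pins equal the setup.py lower bounds.
from packaging.requirements import Requirement

install_requires = [
    "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
    "proto-plus >= 1.1.0",
    "packaging >= 14.3",
]

def lower_bounds(requirements):
    bounds = {}
    for raw in requirements:
        req = Requirement(raw)
        for spec in req.specifier:
            if spec.operator == ">=":
                bounds[req.name] = spec.version
    return bounds

pins = {"google-api-core": "1.22.2", "proto-plus": "1.1.0", "packaging": "14.3"}
assert pins == lower_bounds(install_requires)
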
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
new file mode 100644
index 0000000..e69de29
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
new file mode 100644
index 0000000..e69de29
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
new file mode 100644
index 0000000..e69de29
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/unit/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/unit/gapic/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/notebooks_v1beta1/__init__.py b/tests/unit/gapic/notebooks_v1beta1/__init__.py
index 8b13789..4de6597 100644
--- a/tests/unit/gapic/notebooks_v1beta1/__init__.py
+++ b/tests/unit/gapic/notebooks_v1beta1/__init__.py
@@ -1 +1,15 @@
-
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py
index d0c6432..d09166d 100644
--- a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py
+++ b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
import os
import mock
+import packaging.version
import grpc
from grpc.experimental import aio
@@ -24,16 +23,16 @@
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
-from google import auth
+
from google.api_core import client_options
-from google.api_core import exceptions
+from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
-from google.auth import credentials
+from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.notebooks_v1beta1.services.notebook_service import (
NotebookServiceAsyncClient,
@@ -43,12 +42,42 @@
)
from google.cloud.notebooks_v1beta1.services.notebook_service import pagers
from google.cloud.notebooks_v1beta1.services.notebook_service import transports
+from google.cloud.notebooks_v1beta1.services.notebook_service.transports.base import (
+ _API_CORE_VERSION,
+)
+from google.cloud.notebooks_v1beta1.services.notebook_service.transports.base import (
+ _GOOGLE_AUTH_VERSION,
+)
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
from google.longrunning import operations_pb2
from google.oauth2 import service_account
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+import google.auth
+
+
+# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
+# - Delete all the api-core and auth "less than" test cases
+# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth < 1.25.0",
+)
+requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth >= 1.25.0",
+)
+
+requires_api_core_lt_1_26_0 = pytest.mark.skipif(
+ packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
+ reason="This test requires google-api-core < 1.26.0",
+)
+
+requires_api_core_gte_1_26_0 = pytest.mark.skipif(
+ packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
+ reason="This test requires google-api-core >= 1.26.0",
+)
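
The markers come in gte/lt pairs so that each gated test runs against exactly one installed version range. A usage sketch, assuming the module-level markers and _GOOGLE_AUTH_VERSION defined above; the test names are hypothetical:

@requires_google_auth_gte_1_25_0
def test_scopes_new_google_auth():  # hypothetical test
    # Runs only when google-auth >= 1.25.0 is installed.
    assert packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0")


@requires_google_auth_lt_1_25_0
def test_scopes_old_google_auth():  # hypothetical test
    # Skipped on newer google-auth; exercises the pre-1.25.0 path.
    assert packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0")
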
def client_cert_source_callback():
@@ -96,26 +125,48 @@ def test__get_default_mtls_endpoint():
@pytest.mark.parametrize(
- "client_class", [NotebookServiceClient, NotebookServiceAsyncClient]
+ "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,]
+)
+def test_notebook_service_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "notebooks.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,]
)
def test_notebook_service_client_from_service_account_file(client_class):
- creds = credentials.AnonymousCredentials()
+ creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "notebooks.googleapis.com:443"
def test_notebook_service_client_get_transport_class():
transport = NotebookServiceClient.get_transport_class()
- assert transport == transports.NotebookServiceGrpcTransport
+ available_transports = [
+ transports.NotebookServiceGrpcTransport,
+ ]
+ assert transport in available_transports
transport = NotebookServiceClient.get_transport_class("grpc")
assert transport == transports.NotebookServiceGrpcTransport
@@ -147,7 +198,7 @@ def test_notebook_service_client_client_options(
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(NotebookServiceClient, "get_transport_class") as gtc:
- transport = transport_class(credentials=credentials.AnonymousCredentials())
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
@@ -166,7 +217,7 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -182,7 +233,7 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -198,7 +249,7 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -226,7 +277,7 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -287,29 +338,25 @@ def test_notebook_service_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -318,66 +365,53 @@ def test_notebook_service_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
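
The rewritten assertions expect the client to hand the transport a client_cert_source_for_mtls callback, resolved from the explicit option or from ADC's default client cert source, rather than pre-built SSL channel credentials. A sketch of the resolution order these assertions encode; it illustrates the expected behaviour under the test's environment variables, not the client's actual implementation:

# Sketch: expected endpoint / client cert source resolution in the tests above.
import os

from google.auth.transport import mtls


def expected_mtls_config(explicit_cert_source, default_endpoint, mtls_endpoint):
    use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if use_cert == "false":
        return default_endpoint, None
    if explicit_cert_source is not None:
        return mtls_endpoint, explicit_cert_source
    if mtls.has_default_client_cert_source():
        return mtls_endpoint, mtls.default_client_cert_source()
    return default_endpoint, None
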
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -403,7 +437,7 @@ def test_notebook_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -433,7 +467,7 @@ def test_notebook_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -452,7 +486,7 @@ def test_notebook_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -462,7 +496,7 @@ def test_list_instances(
transport: str = "grpc", request_type=service.ListInstancesRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -475,21 +509,16 @@ def test_list_instances(
call.return_value = service.ListInstancesResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
-
response = client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ListInstancesRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListInstancesPager)
-
assert response.next_page_token == "next_page_token_value"
-
assert response.unreachable == ["unreachable_value"]
@@ -497,12 +526,27 @@ def test_list_instances_from_dict():
test_list_instances(request_type=dict)
+def test_list_instances_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+ client.list_instances()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ListInstancesRequest()
+
+
@pytest.mark.asyncio
async def test_list_instances_async(
transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -518,20 +562,16 @@ async def test_list_instances_async(
unreachable=["unreachable_value"],
)
)
-
response = await client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ListInstancesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListInstancesAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
-
assert response.unreachable == ["unreachable_value"]
@@ -541,17 +581,17 @@ async def test_list_instances_async_from_dict():
def test_list_instances_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ListInstancesRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
call.return_value = service.ListInstancesResponse()
-
client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
@@ -566,11 +606,14 @@ def test_list_instances_field_headers():
@pytest.mark.asyncio
async def test_list_instances_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ListInstancesRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -578,7 +621,6 @@ async def test_list_instances_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.ListInstancesResponse()
)
-
await client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
@@ -592,7 +634,7 @@ async def test_list_instances_field_headers_async():
def test_list_instances_pager():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
@@ -630,7 +672,7 @@ def test_list_instances_pager():
def test_list_instances_pages():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
@@ -660,7 +702,9 @@ def test_list_instances_pages():
@pytest.mark.asyncio
async def test_list_instances_async_pager():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -697,7 +741,9 @@ async def test_list_instances_async_pager():
@pytest.mark.asyncio
async def test_list_instances_async_pages():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -731,7 +777,7 @@ async def test_list_instances_async_pages():
def test_get_instance(transport: str = "grpc", request_type=service.GetInstanceRequest):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -764,57 +810,34 @@ def test_get_instance(transport: str = "grpc", request_type=service.GetInstanceR
subnet="subnet_value",
vm_image=environment.VmImage(project="project_value"),
)
-
response = client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.GetInstanceRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, instance.Instance)
-
assert response.name == "name_value"
-
assert response.post_startup_script == "post_startup_script_value"
-
assert response.proxy_uri == "proxy_uri_value"
-
assert response.instance_owners == ["instance_owners_value"]
-
assert response.service_account == "service_account_value"
-
assert response.machine_type == "machine_type_value"
-
assert response.state == instance.Instance.State.STARTING
-
assert response.install_gpu_driver is True
-
assert response.custom_gpu_driver_path == "custom_gpu_driver_path_value"
-
assert response.boot_disk_type == instance.Instance.DiskType.PD_STANDARD
-
assert response.boot_disk_size_gb == 1792
-
assert response.data_disk_type == instance.Instance.DiskType.PD_STANDARD
-
assert response.data_disk_size_gb == 1766
-
assert response.no_remove_data_disk is True
-
assert response.disk_encryption == instance.Instance.DiskEncryption.GMEK
-
assert response.kms_key == "kms_key_value"
-
assert response.no_public_ip is True
-
assert response.no_proxy_access is True
-
assert response.network == "network_value"
-
assert response.subnet == "subnet_value"
@@ -822,12 +845,27 @@ def test_get_instance_from_dict():
test_get_instance(request_type=dict)
+def test_get_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
+ client.get_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.GetInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_get_instance_async(
transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -861,56 +899,34 @@ async def test_get_instance_async(
subnet="subnet_value",
)
)
-
response = await client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.GetInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, instance.Instance)
-
assert response.name == "name_value"
-
assert response.post_startup_script == "post_startup_script_value"
-
assert response.proxy_uri == "proxy_uri_value"
-
assert response.instance_owners == ["instance_owners_value"]
-
assert response.service_account == "service_account_value"
-
assert response.machine_type == "machine_type_value"
-
assert response.state == instance.Instance.State.STARTING
-
assert response.install_gpu_driver is True
-
assert response.custom_gpu_driver_path == "custom_gpu_driver_path_value"
-
assert response.boot_disk_type == instance.Instance.DiskType.PD_STANDARD
-
assert response.boot_disk_size_gb == 1792
-
assert response.data_disk_type == instance.Instance.DiskType.PD_STANDARD
-
assert response.data_disk_size_gb == 1766
-
assert response.no_remove_data_disk is True
-
assert response.disk_encryption == instance.Instance.DiskEncryption.GMEK
-
assert response.kms_key == "kms_key_value"
-
assert response.no_public_ip is True
-
assert response.no_proxy_access is True
-
assert response.network == "network_value"
-
assert response.subnet == "subnet_value"
@@ -920,17 +936,17 @@ async def test_get_instance_async_from_dict():
def test_get_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.GetInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
call.return_value = instance.Instance()
-
client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -945,17 +961,19 @@ def test_get_instance_field_headers():
@pytest.mark.asyncio
async def test_get_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.GetInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance())
-
await client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -972,7 +990,7 @@ def test_create_instance(
transport: str = "grpc", request_type=service.CreateInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -983,13 +1001,11 @@ def test_create_instance(
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.CreateInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1000,12 +1016,27 @@ def test_create_instance_from_dict():
test_create_instance(request_type=dict)
+def test_create_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
+ client.create_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.CreateInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_create_instance_async(
transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1018,13 +1049,11 @@ async def test_create_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.CreateInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1037,17 +1066,17 @@ async def test_create_instance_async_from_dict():
def test_create_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.CreateInstanceRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1062,11 +1091,14 @@ def test_create_instance_field_headers():
@pytest.mark.asyncio
async def test_create_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.CreateInstanceRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1074,7 +1106,6 @@ async def test_create_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1091,7 +1122,7 @@ def test_register_instance(
transport: str = "grpc", request_type=service.RegisterInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1104,13 +1135,11 @@ def test_register_instance(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.register_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.RegisterInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1121,12 +1150,29 @@ def test_register_instance_from_dict():
test_register_instance(request_type=dict)
+def test_register_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.register_instance), "__call__"
+ ) as call:
+ client.register_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.RegisterInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_register_instance_async(
transport: str = "grpc_asyncio", request_type=service.RegisterInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1141,13 +1187,11 @@ async def test_register_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.register_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.RegisterInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1160,11 +1204,12 @@ async def test_register_instance_async_from_dict():
def test_register_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.RegisterInstanceRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1172,7 +1217,6 @@ def test_register_instance_field_headers():
type(client.transport.register_instance), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.register_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1187,11 +1231,14 @@ def test_register_instance_field_headers():
@pytest.mark.asyncio
async def test_register_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.RegisterInstanceRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1201,7 +1248,6 @@ async def test_register_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.register_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1218,7 +1264,7 @@ def test_set_instance_accelerator(
transport: str = "grpc", request_type=service.SetInstanceAcceleratorRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1231,13 +1277,11 @@ def test_set_instance_accelerator(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.set_instance_accelerator(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.SetInstanceAcceleratorRequest()
# Establish that the response is the type that we expect.
@@ -1248,12 +1292,29 @@ def test_set_instance_accelerator_from_dict():
test_set_instance_accelerator(request_type=dict)
+def test_set_instance_accelerator_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_instance_accelerator), "__call__"
+ ) as call:
+ client.set_instance_accelerator()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.SetInstanceAcceleratorRequest()
+
+
@pytest.mark.asyncio
async def test_set_instance_accelerator_async(
transport: str = "grpc_asyncio", request_type=service.SetInstanceAcceleratorRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1268,13 +1329,11 @@ async def test_set_instance_accelerator_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.set_instance_accelerator(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.SetInstanceAcceleratorRequest()
# Establish that the response is the type that we expect.
@@ -1287,11 +1346,12 @@ async def test_set_instance_accelerator_async_from_dict():
def test_set_instance_accelerator_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.SetInstanceAcceleratorRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1299,7 +1359,6 @@ def test_set_instance_accelerator_field_headers():
type(client.transport.set_instance_accelerator), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.set_instance_accelerator(request)
# Establish that the underlying gRPC stub method was called.
@@ -1314,11 +1373,14 @@ def test_set_instance_accelerator_field_headers():
@pytest.mark.asyncio
async def test_set_instance_accelerator_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.SetInstanceAcceleratorRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1328,7 +1390,6 @@ async def test_set_instance_accelerator_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.set_instance_accelerator(request)
# Establish that the underlying gRPC stub method was called.
@@ -1345,7 +1406,7 @@ def test_set_instance_machine_type(
transport: str = "grpc", request_type=service.SetInstanceMachineTypeRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1358,13 +1419,11 @@ def test_set_instance_machine_type(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.set_instance_machine_type(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.SetInstanceMachineTypeRequest()
# Establish that the response is the type that we expect.
@@ -1375,12 +1434,29 @@ def test_set_instance_machine_type_from_dict():
test_set_instance_machine_type(request_type=dict)
+def test_set_instance_machine_type_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_instance_machine_type), "__call__"
+ ) as call:
+ client.set_instance_machine_type()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.SetInstanceMachineTypeRequest()
+
+
@pytest.mark.asyncio
async def test_set_instance_machine_type_async(
transport: str = "grpc_asyncio", request_type=service.SetInstanceMachineTypeRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1395,13 +1471,11 @@ async def test_set_instance_machine_type_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.set_instance_machine_type(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.SetInstanceMachineTypeRequest()
# Establish that the response is the type that we expect.
@@ -1414,11 +1488,12 @@ async def test_set_instance_machine_type_async_from_dict():
def test_set_instance_machine_type_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.SetInstanceMachineTypeRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1426,7 +1501,6 @@ def test_set_instance_machine_type_field_headers():
type(client.transport.set_instance_machine_type), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.set_instance_machine_type(request)
# Establish that the underlying gRPC stub method was called.
@@ -1441,11 +1515,14 @@ def test_set_instance_machine_type_field_headers():
@pytest.mark.asyncio
async def test_set_instance_machine_type_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.SetInstanceMachineTypeRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1455,7 +1532,6 @@ async def test_set_instance_machine_type_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.set_instance_machine_type(request)
# Establish that the underlying gRPC stub method was called.
@@ -1472,7 +1548,7 @@ def test_set_instance_labels(
transport: str = "grpc", request_type=service.SetInstanceLabelsRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1485,13 +1561,11 @@ def test_set_instance_labels(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.set_instance_labels(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.SetInstanceLabelsRequest()
# Establish that the response is the type that we expect.
@@ -1502,12 +1576,29 @@ def test_set_instance_labels_from_dict():
test_set_instance_labels(request_type=dict)
+def test_set_instance_labels_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_instance_labels), "__call__"
+ ) as call:
+ client.set_instance_labels()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.SetInstanceLabelsRequest()
+
+
@pytest.mark.asyncio
async def test_set_instance_labels_async(
transport: str = "grpc_asyncio", request_type=service.SetInstanceLabelsRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1522,13 +1613,11 @@ async def test_set_instance_labels_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.set_instance_labels(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.SetInstanceLabelsRequest()
# Establish that the response is the type that we expect.
@@ -1541,11 +1630,12 @@ async def test_set_instance_labels_async_from_dict():
def test_set_instance_labels_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.SetInstanceLabelsRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1553,7 +1643,6 @@ def test_set_instance_labels_field_headers():
type(client.transport.set_instance_labels), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.set_instance_labels(request)
# Establish that the underlying gRPC stub method was called.
@@ -1568,11 +1657,14 @@ def test_set_instance_labels_field_headers():
@pytest.mark.asyncio
async def test_set_instance_labels_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.SetInstanceLabelsRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1582,7 +1674,6 @@ async def test_set_instance_labels_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.set_instance_labels(request)
# Establish that the underlying gRPC stub method was called.
@@ -1599,7 +1690,7 @@ def test_delete_instance(
transport: str = "grpc", request_type=service.DeleteInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1610,13 +1701,11 @@ def test_delete_instance(
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.DeleteInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1627,12 +1716,27 @@ def test_delete_instance_from_dict():
test_delete_instance(request_type=dict)
+def test_delete_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+ client.delete_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.DeleteInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_delete_instance_async(
transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1645,13 +1749,11 @@ async def test_delete_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.DeleteInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1664,17 +1766,17 @@ async def test_delete_instance_async_from_dict():
def test_delete_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.DeleteInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1689,11 +1791,14 @@ def test_delete_instance_field_headers():
@pytest.mark.asyncio
async def test_delete_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.DeleteInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1701,7 +1806,6 @@ async def test_delete_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1718,7 +1822,7 @@ def test_start_instance(
transport: str = "grpc", request_type=service.StartInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1729,13 +1833,11 @@ def test_start_instance(
with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.start_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.StartInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1746,12 +1848,27 @@ def test_start_instance_from_dict():
test_start_instance(request_type=dict)
+def test_start_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
+ client.start_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.StartInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_start_instance_async(
transport: str = "grpc_asyncio", request_type=service.StartInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1764,13 +1881,11 @@ async def test_start_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.start_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.StartInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1783,17 +1898,17 @@ async def test_start_instance_async_from_dict():
def test_start_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.StartInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.start_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1808,11 +1923,14 @@ def test_start_instance_field_headers():
@pytest.mark.asyncio
async def test_start_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.StartInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1820,7 +1938,6 @@ async def test_start_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.start_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1837,7 +1954,7 @@ def test_stop_instance(
transport: str = "grpc", request_type=service.StopInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1848,13 +1965,11 @@ def test_stop_instance(
with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.stop_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.StopInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1865,12 +1980,27 @@ def test_stop_instance_from_dict():
test_stop_instance(request_type=dict)
+def test_stop_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
+ client.stop_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.StopInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_stop_instance_async(
transport: str = "grpc_asyncio", request_type=service.StopInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1883,13 +2013,11 @@ async def test_stop_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.stop_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.StopInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1902,17 +2030,17 @@ async def test_stop_instance_async_from_dict():
def test_stop_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.StopInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.stop_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1927,11 +2055,14 @@ def test_stop_instance_field_headers():
@pytest.mark.asyncio
async def test_stop_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.StopInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1939,7 +2070,6 @@ async def test_stop_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.stop_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -1956,7 +2086,7 @@ def test_reset_instance(
transport: str = "grpc", request_type=service.ResetInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -1967,13 +2097,11 @@ def test_reset_instance(
with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.reset_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ResetInstanceRequest()
# Establish that the response is the type that we expect.
@@ -1984,12 +2112,27 @@ def test_reset_instance_from_dict():
test_reset_instance(request_type=dict)
+def test_reset_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
+ client.reset_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ResetInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_reset_instance_async(
transport: str = "grpc_asyncio", request_type=service.ResetInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2002,13 +2145,11 @@ async def test_reset_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.reset_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ResetInstanceRequest()
# Establish that the response is the type that we expect.
@@ -2021,17 +2162,17 @@ async def test_reset_instance_async_from_dict():
def test_reset_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ResetInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.reset_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -2046,11 +2187,14 @@ def test_reset_instance_field_headers():
@pytest.mark.asyncio
async def test_reset_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ResetInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2058,7 +2202,6 @@ async def test_reset_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.reset_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -2075,7 +2218,7 @@ def test_report_instance_info(
transport: str = "grpc", request_type=service.ReportInstanceInfoRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2088,13 +2231,11 @@ def test_report_instance_info(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.report_instance_info(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ReportInstanceInfoRequest()
# Establish that the response is the type that we expect.
@@ -2105,12 +2246,29 @@ def test_report_instance_info_from_dict():
test_report_instance_info(request_type=dict)
+def test_report_instance_info_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.report_instance_info), "__call__"
+ ) as call:
+ client.report_instance_info()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ReportInstanceInfoRequest()
+
+
@pytest.mark.asyncio
async def test_report_instance_info_async(
transport: str = "grpc_asyncio", request_type=service.ReportInstanceInfoRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2125,13 +2283,11 @@ async def test_report_instance_info_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.report_instance_info(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ReportInstanceInfoRequest()
# Establish that the response is the type that we expect.
@@ -2144,11 +2300,12 @@ async def test_report_instance_info_async_from_dict():
def test_report_instance_info_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ReportInstanceInfoRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2156,7 +2313,6 @@ def test_report_instance_info_field_headers():
type(client.transport.report_instance_info), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.report_instance_info(request)
# Establish that the underlying gRPC stub method was called.
@@ -2171,11 +2327,14 @@ def test_report_instance_info_field_headers():
@pytest.mark.asyncio
async def test_report_instance_info_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ReportInstanceInfoRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2185,7 +2344,6 @@ async def test_report_instance_info_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.report_instance_info(request)
# Establish that the underlying gRPC stub method was called.
@@ -2202,7 +2360,7 @@ def test_is_instance_upgradeable(
transport: str = "grpc", request_type=service.IsInstanceUpgradeableRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2219,23 +2377,17 @@ def test_is_instance_upgradeable(
upgrade_version="upgrade_version_value",
upgrade_info="upgrade_info_value",
)
-
response = client.is_instance_upgradeable(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.IsInstanceUpgradeableRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, service.IsInstanceUpgradeableResponse)
-
assert response.upgradeable is True
-
assert response.upgrade_version == "upgrade_version_value"
-
assert response.upgrade_info == "upgrade_info_value"
@@ -2243,12 +2395,29 @@ def test_is_instance_upgradeable_from_dict():
test_is_instance_upgradeable(request_type=dict)
+def test_is_instance_upgradeable_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.is_instance_upgradeable), "__call__"
+ ) as call:
+ client.is_instance_upgradeable()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.IsInstanceUpgradeableRequest()
+
+
@pytest.mark.asyncio
async def test_is_instance_upgradeable_async(
transport: str = "grpc_asyncio", request_type=service.IsInstanceUpgradeableRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2267,22 +2436,17 @@ async def test_is_instance_upgradeable_async(
upgrade_info="upgrade_info_value",
)
)
-
response = await client.is_instance_upgradeable(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.IsInstanceUpgradeableRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, service.IsInstanceUpgradeableResponse)
-
assert response.upgradeable is True
-
assert response.upgrade_version == "upgrade_version_value"
-
assert response.upgrade_info == "upgrade_info_value"
@@ -2292,11 +2456,12 @@ async def test_is_instance_upgradeable_async_from_dict():
def test_is_instance_upgradeable_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.IsInstanceUpgradeableRequest()
+
request.notebook_instance = "notebook_instance/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2304,7 +2469,6 @@ def test_is_instance_upgradeable_field_headers():
type(client.transport.is_instance_upgradeable), "__call__"
) as call:
call.return_value = service.IsInstanceUpgradeableResponse()
-
client.is_instance_upgradeable(request)
# Establish that the underlying gRPC stub method was called.
@@ -2322,11 +2486,14 @@ def test_is_instance_upgradeable_field_headers():
@pytest.mark.asyncio
async def test_is_instance_upgradeable_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.IsInstanceUpgradeableRequest()
+
request.notebook_instance = "notebook_instance/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2336,7 +2503,6 @@ async def test_is_instance_upgradeable_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.IsInstanceUpgradeableResponse()
)
-
await client.is_instance_upgradeable(request)
# Establish that the underlying gRPC stub method was called.
@@ -2356,7 +2522,7 @@ def test_upgrade_instance(
transport: str = "grpc", request_type=service.UpgradeInstanceRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2367,13 +2533,11 @@ def test_upgrade_instance(
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.UpgradeInstanceRequest()
# Establish that the response is the type that we expect.
@@ -2384,12 +2548,27 @@ def test_upgrade_instance_from_dict():
test_upgrade_instance(request_type=dict)
+def test_upgrade_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
+ client.upgrade_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.UpgradeInstanceRequest()
+
+
@pytest.mark.asyncio
async def test_upgrade_instance_async(
transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2402,13 +2581,11 @@ async def test_upgrade_instance_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.UpgradeInstanceRequest()
# Establish that the response is the type that we expect.
@@ -2421,17 +2598,17 @@ async def test_upgrade_instance_async_from_dict():
def test_upgrade_instance_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.UpgradeInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -2446,11 +2623,14 @@ def test_upgrade_instance_field_headers():
@pytest.mark.asyncio
async def test_upgrade_instance_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.UpgradeInstanceRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2458,7 +2638,6 @@ async def test_upgrade_instance_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
@@ -2475,7 +2654,7 @@ def test_upgrade_instance_internal(
transport: str = "grpc", request_type=service.UpgradeInstanceInternalRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2488,13 +2667,11 @@ def test_upgrade_instance_internal(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.upgrade_instance_internal(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.UpgradeInstanceInternalRequest()
# Establish that the response is the type that we expect.
@@ -2505,12 +2682,29 @@ def test_upgrade_instance_internal_from_dict():
test_upgrade_instance_internal(request_type=dict)
+def test_upgrade_instance_internal_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.upgrade_instance_internal), "__call__"
+ ) as call:
+ client.upgrade_instance_internal()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.UpgradeInstanceInternalRequest()
+
+
@pytest.mark.asyncio
async def test_upgrade_instance_internal_async(
transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceInternalRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2525,13 +2719,11 @@ async def test_upgrade_instance_internal_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.upgrade_instance_internal(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.UpgradeInstanceInternalRequest()
# Establish that the response is the type that we expect.
@@ -2544,11 +2736,12 @@ async def test_upgrade_instance_internal_async_from_dict():
def test_upgrade_instance_internal_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.UpgradeInstanceInternalRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2556,7 +2749,6 @@ def test_upgrade_instance_internal_field_headers():
type(client.transport.upgrade_instance_internal), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.upgrade_instance_internal(request)
# Establish that the underlying gRPC stub method was called.
@@ -2571,11 +2763,14 @@ def test_upgrade_instance_internal_field_headers():
@pytest.mark.asyncio
async def test_upgrade_instance_internal_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.UpgradeInstanceInternalRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2585,7 +2780,6 @@ async def test_upgrade_instance_internal_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.upgrade_instance_internal(request)
# Establish that the underlying gRPC stub method was called.
@@ -2602,7 +2796,7 @@ def test_list_environments(
transport: str = "grpc", request_type=service.ListEnvironmentsRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2617,21 +2811,16 @@ def test_list_environments(
call.return_value = service.ListEnvironmentsResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
-
response = client.list_environments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ListEnvironmentsRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, pagers.ListEnvironmentsPager)
-
assert response.next_page_token == "next_page_token_value"
-
assert response.unreachable == ["unreachable_value"]
@@ -2639,12 +2828,29 @@ def test_list_environments_from_dict():
test_list_environments(request_type=dict)
+def test_list_environments_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments), "__call__"
+ ) as call:
+ client.list_environments()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ListEnvironmentsRequest()
+
+
@pytest.mark.asyncio
async def test_list_environments_async(
transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2662,20 +2868,16 @@ async def test_list_environments_async(
unreachable=["unreachable_value"],
)
)
-
response = await client.list_environments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.ListEnvironmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListEnvironmentsAsyncPager)
-
assert response.next_page_token == "next_page_token_value"
-
assert response.unreachable == ["unreachable_value"]
@@ -2685,11 +2887,12 @@ async def test_list_environments_async_from_dict():
def test_list_environments_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ListEnvironmentsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2697,7 +2900,6 @@ def test_list_environments_field_headers():
type(client.transport.list_environments), "__call__"
) as call:
call.return_value = service.ListEnvironmentsResponse()
-
client.list_environments(request)
# Establish that the underlying gRPC stub method was called.
@@ -2712,11 +2914,14 @@ def test_list_environments_field_headers():
@pytest.mark.asyncio
async def test_list_environments_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.ListEnvironmentsRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2726,7 +2931,6 @@ async def test_list_environments_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.ListEnvironmentsResponse()
)
-
await client.list_environments(request)
# Establish that the underlying gRPC stub method was called.
@@ -2740,7 +2944,7 @@ async def test_list_environments_field_headers_async():
def test_list_environments_pager():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2780,7 +2984,7 @@ def test_list_environments_pager():
def test_list_environments_pages():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2812,7 +3016,9 @@ def test_list_environments_pages():
@pytest.mark.asyncio
async def test_list_environments_async_pager():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2851,7 +3057,9 @@ async def test_list_environments_async_pager():
@pytest.mark.asyncio
async def test_list_environments_async_pages():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
@@ -2889,7 +3097,7 @@ def test_get_environment(
transport: str = "grpc", request_type=service.GetEnvironmentRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2906,25 +3114,18 @@ def test_get_environment(
post_startup_script="post_startup_script_value",
vm_image=environment.VmImage(project="project_value"),
)
-
response = client.get_environment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.GetEnvironmentRequest()
# Establish that the response is the type that we expect.
-
assert isinstance(response, environment.Environment)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.description == "description_value"
-
assert response.post_startup_script == "post_startup_script_value"
@@ -2932,12 +3133,27 @@ def test_get_environment_from_dict():
test_get_environment(request_type=dict)
+def test_get_environment_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call:
+ client.get_environment()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.GetEnvironmentRequest()
+
+
@pytest.mark.asyncio
async def test_get_environment_async(
transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -2955,24 +3171,18 @@ async def test_get_environment_async(
post_startup_script="post_startup_script_value",
)
)
-
response = await client.get_environment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.GetEnvironmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, environment.Environment)
-
assert response.name == "name_value"
-
assert response.display_name == "display_name_value"
-
assert response.description == "description_value"
-
assert response.post_startup_script == "post_startup_script_value"
@@ -2982,17 +3192,17 @@ async def test_get_environment_async_from_dict():
def test_get_environment_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.GetEnvironmentRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_environment), "__call__") as call:
call.return_value = environment.Environment()
-
client.get_environment(request)
# Establish that the underlying gRPC stub method was called.
@@ -3007,11 +3217,14 @@ def test_get_environment_field_headers():
@pytest.mark.asyncio
async def test_get_environment_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.GetEnvironmentRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3019,7 +3232,6 @@ async def test_get_environment_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
environment.Environment()
)
-
await client.get_environment(request)
# Establish that the underlying gRPC stub method was called.
@@ -3036,7 +3248,7 @@ def test_create_environment(
transport: str = "grpc", request_type=service.CreateEnvironmentRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3049,13 +3261,11 @@ def test_create_environment(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.create_environment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.CreateEnvironmentRequest()
# Establish that the response is the type that we expect.
@@ -3066,12 +3276,29 @@ def test_create_environment_from_dict():
test_create_environment(request_type=dict)
+def test_create_environment_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_environment), "__call__"
+ ) as call:
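+ # Calling the method with no arguments should still result in a
+ # default CreateEnvironmentRequest being sent.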
+ client.create_environment()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.CreateEnvironmentRequest()
+
+
@pytest.mark.asyncio
async def test_create_environment_async(
transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3086,13 +3313,11 @@ async def test_create_environment_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.create_environment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.CreateEnvironmentRequest()
# Establish that the response is the type that we expect.
@@ -3105,11 +3330,12 @@ async def test_create_environment_async_from_dict():
def test_create_environment_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.CreateEnvironmentRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3117,7 +3343,6 @@ def test_create_environment_field_headers():
type(client.transport.create_environment), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.create_environment(request)
# Establish that the underlying gRPC stub method was called.
@@ -3132,11 +3357,14 @@ def test_create_environment_field_headers():
@pytest.mark.asyncio
async def test_create_environment_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.CreateEnvironmentRequest()
+
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3146,7 +3374,6 @@ async def test_create_environment_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.create_environment(request)
# Establish that the underlying gRPC stub method was called.
@@ -3163,7 +3390,7 @@ def test_delete_environment(
transport: str = "grpc", request_type=service.DeleteEnvironmentRequest
):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3176,13 +3403,11 @@ def test_delete_environment(
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
-
response = client.delete_environment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.DeleteEnvironmentRequest()
# Establish that the response is the type that we expect.
@@ -3193,12 +3418,29 @@ def test_delete_environment_from_dict():
test_delete_environment(request_type=dict)
+def test_delete_environment_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = NotebookServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_environment), "__call__"
+ ) as call:
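+ # Calling the method with no arguments should still result in a
+ # default DeleteEnvironmentRequest being sent.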
+ client.delete_environment()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.DeleteEnvironmentRequest()
+
+
@pytest.mark.asyncio
async def test_delete_environment_async(
transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest
):
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
@@ -3213,13 +3455,11 @@ async def test_delete_environment_async(
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
-
response = await client.delete_environment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
-
assert args[0] == service.DeleteEnvironmentRequest()
# Establish that the response is the type that we expect.
@@ -3232,11 +3472,12 @@ async def test_delete_environment_async_from_dict():
def test_delete_environment_field_headers():
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.DeleteEnvironmentRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3244,7 +3485,6 @@ def test_delete_environment_field_headers():
type(client.transport.delete_environment), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
-
client.delete_environment(request)
# Establish that the underlying gRPC stub method was called.
@@ -3259,11 +3499,14 @@ def test_delete_environment_field_headers():
@pytest.mark.asyncio
async def test_delete_environment_field_headers_async():
- client = NotebookServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = service.DeleteEnvironmentRequest()
+
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3273,7 +3516,6 @@ async def test_delete_environment_field_headers_async():
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
-
await client.delete_environment(request)
# Establish that the underlying gRPC stub method was called.
@@ -3289,16 +3531,16 @@ async def test_delete_environment_field_headers_async():
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.NotebookServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.NotebookServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = NotebookServiceClient(
@@ -3308,7 +3550,7 @@ def test_credentials_transport_error():
# It is an error to provide scopes and a transport instance.
transport = transports.NotebookServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = NotebookServiceClient(
@@ -3319,7 +3561,7 @@ def test_credentials_transport_error():
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.NotebookServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
client = NotebookServiceClient(transport=transport)
assert client.transport is transport
@@ -3328,13 +3570,13 @@ def test_transport_instance():
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.NotebookServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.NotebookServiceGrpcAsyncIOTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@@ -3349,23 +3591,23 @@ def test_transport_get_channel():
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
- client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
+ client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
assert isinstance(client.transport, transports.NotebookServiceGrpcTransport,)
def test_notebook_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(exceptions.DuplicateCredentialArgs):
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.NotebookServiceTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
@@ -3377,7 +3619,7 @@ def test_notebook_service_base_transport():
) as Transport:
Transport.return_value = None
transport = transports.NotebookServiceTransport(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
@@ -3413,15 +3655,37 @@ def test_notebook_service_base_transport():
transport.operations_client
+@requires_google_auth_gte_1_25_0
def test_notebook_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
- auth, "load_credentials_from_file"
+ google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
- load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.NotebookServiceTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
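+ # With google-auth >= 1.25.0 the cloud-platform scope travels in
+ # default_scopes, so scopes stays None here.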
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+@requires_google_auth_lt_1_25_0
+def test_notebook_service_base_transport_with_credentials_file_old_google_auth():
+ # Instantiate the base transport with a credentials file
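+ # google-auth < 1.25.0 does not accept the default_scopes argument,
+ # so this variant exercises the legacy call signature.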
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.NotebookServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
@@ -3434,19 +3698,33 @@ def test_notebook_service_base_transport_with_credentials_file():
def test_notebook_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(auth, "default") as adc, mock.patch(
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.NotebookServiceTransport()
adc.assert_called_once()
+@requires_google_auth_gte_1_25_0
def test_notebook_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ NotebookServiceClient()
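+ # With google-auth >= 1.25.0 the client requests the cloud-platform
+ # scope via default_scopes.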
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id=None,
+ )
+
+
+@requires_google_auth_lt_1_25_0
+def test_notebook_service_auth_adc_old_google_auth():
+ # If no credentials are provided, we should use ADC credentials.
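+ # On google-auth < 1.25.0 the cloud-platform scope is passed via
+ # scopes (see the unchanged assertion below).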
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
NotebookServiceClient()
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
@@ -3454,23 +3732,204 @@ def test_notebook_service_auth_adc():
)
-def test_notebook_service_transport_auth_adc():
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.NotebookServiceGrpcTransport,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ ],
+)
+@requires_google_auth_gte_1_25_0
+def test_notebook_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- transports.NotebookServiceGrpcTransport(
- host="squid.clam.whelk", quota_project_id="octopus"
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
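+ # User-provided scopes are forwarded as-is; the cloud-platform scope
+ # rides along in default_scopes.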
+ adc.assert_called_once_with(
+ scopes=["1", "2"],
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
)
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.NotebookServiceGrpcTransport,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ ],
+)
+@requires_google_auth_lt_1_25_0
+def test_notebook_service_transport_auth_adc_old_google_auth(transport_class):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus")
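+ # Without default_scopes support, the cloud-platform scope is passed
+ # directly (asserted below).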
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.NotebookServiceGrpcTransport, grpc_helpers),
+ (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+@requires_api_core_gte_1_26_0
+def test_notebook_service_transport_create_channel(transport_class, grpc_helpers):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
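+ # api-core >= 1.26.0 forwards default_scopes and default_host to
+ # create_channel.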
+ create_channel.assert_called_with(
+ "notebooks.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ scopes=["1", "2"],
+ default_host="notebooks.googleapis.com",
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.NotebookServiceGrpcTransport, grpc_helpers),
+ (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+@requires_api_core_lt_1_26_0
+def test_notebook_service_transport_create_channel_old_api_core(
+ transport_class, grpc_helpers
+):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+ transport_class(quota_project_id="octopus")
+
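+ # Older api-core has no default_scopes/default_host, so the resolved
+ # cloud-platform scope is passed via scopes.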
+ create_channel.assert_called_with(
+ "notebooks.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.NotebookServiceGrpcTransport, grpc_helpers),
+ (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async),
+ ],
+)
+@requires_api_core_lt_1_26_0
+def test_notebook_service_transport_create_channel_user_scopes(
+ transport_class, grpc_helpers
+):
+ # If explicit scopes are provided they should be forwarded to
+ # create_channel, while ADC still supplies the credentials.
+ with mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+ create_channel.assert_called_with(
+ "notebooks.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ scopes=["1", "2"],
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.NotebookServiceGrpcTransport,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_notebook_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = ga_credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
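+ # The provided ssl_channel_credentials object should be passed
+ # through to create_channel unchanged.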
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check that client_cert_source_for_mtls is used when
+ # ssl_channel_credentials is not provided.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
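+ # The cert/key pair returned by the callback should be used to build
+ # the SSL channel credentials.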
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_notebook_service_host_no_port():
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="notebooks.googleapis.com"
),
@@ -3480,7 +3939,7 @@ def test_notebook_service_host_no_port():
def test_notebook_service_host_with_port():
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(),
+ credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="notebooks.googleapis.com:8000"
),
@@ -3489,7 +3948,7 @@ def test_notebook_service_host_with_port():
def test_notebook_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.NotebookServiceGrpcTransport(
@@ -3501,7 +3960,7 @@ def test_notebook_service_grpc_transport_channel():
def test_notebook_service_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.NotebookServiceGrpcAsyncIOTransport(
@@ -3512,6 +3971,8 @@ def test_notebook_service_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -3526,7 +3987,7 @@ def test_notebook_service_transport_channel_mtls_with_client_cert_source(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -3534,9 +3995,9 @@ def test_notebook_service_transport_channel_mtls_with_client_cert_source(
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
- cred = credentials.AnonymousCredentials()
+ cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
- with mock.patch.object(auth, "default") as adc:
+ with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
@@ -3564,6 +4025,8 @@ def test_notebook_service_transport_channel_mtls_with_client_cert_source(
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -3579,7 +4042,7 @@ def test_notebook_service_transport_channel_mtls_with_adc(transport_class):
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
@@ -3610,7 +4073,7 @@ def test_notebook_service_transport_channel_mtls_with_adc(transport_class):
def test_notebook_service_grpc_lro_client():
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
transport = client.transport
@@ -3623,7 +4086,7 @@ def test_notebook_service_grpc_lro_client():
def test_notebook_service_grpc_lro_async_client():
client = NotebookServiceAsyncClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
transport = client.transport
@@ -3637,7 +4100,6 @@ def test_notebook_service_grpc_lro_async_client():
def test_environment_path():
project = "squid"
environment = "clam"
-
expected = "projects/{project}/environments/{environment}".format(
project=project, environment=environment,
)
@@ -3660,7 +4122,6 @@ def test_parse_environment_path():
def test_instance_path():
project = "oyster"
instance = "nudibranch"
-
expected = "projects/{project}/instances/{instance}".format(
project=project, instance=instance,
)
@@ -3682,7 +4143,6 @@ def test_parse_instance_path():
def test_common_billing_account_path():
billing_account = "winkle"
-
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@@ -3703,7 +4163,6 @@ def test_parse_common_billing_account_path():
def test_common_folder_path():
folder = "scallop"
-
expected = "folders/{folder}".format(folder=folder,)
actual = NotebookServiceClient.common_folder_path(folder)
assert expected == actual
@@ -3722,7 +4181,6 @@ def test_parse_common_folder_path():
def test_common_organization_path():
organization = "squid"
-
expected = "organizations/{organization}".format(organization=organization,)
actual = NotebookServiceClient.common_organization_path(organization)
assert expected == actual
@@ -3741,7 +4199,6 @@ def test_parse_common_organization_path():
def test_common_project_path():
project = "whelk"
-
expected = "projects/{project}".format(project=project,)
actual = NotebookServiceClient.common_project_path(project)
assert expected == actual
@@ -3761,7 +4218,6 @@ def test_parse_common_project_path():
def test_common_location_path():
project = "oyster"
location = "nudibranch"
-
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@@ -3788,7 +4244,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
transports.NotebookServiceTransport, "_prep_wrapped_messages"
) as prep:
client = NotebookServiceClient(
- credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
@@ -3797,6 +4253,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
) as prep:
transport_class = NotebookServiceClient.get_transport_class()
transport = transport_class(
- credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)